| repo_name (string, lengths 5-100) | path (string, lengths 4-299) | copies (string, 990 classes) | size (string, lengths 4-7) | content (string, lengths 666-1.03M) | license (string, 15 classes) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17-100) | line_max (int64, 7-1k) | alpha_frac (float64, 0.25-0.98) | autogenerated (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|
lavizhao/insummer | code/script/cal_rouge.py | 1 | 1665 | #!/usr/bin/python3
import os
ddir = "/home/lavi/project/insummer/duc/Rouge_scores/"
from pprint import pprint
def main(min_rouge1f):
total = []
count = 0
for fname in os.listdir(ddir):
fname = ddir+fname
f = open(fname)
count += 1
oner = {}
for line in f:
if len(line) <= 3 or line.startswith('-'):
continue
else:
rs = line.split()
name = rs[1] + " " + rs[2]
score = float(rs[3])
oner[name] = score
total.append(oner)
print("总文档数",count)
#遍历一遍找到大于rouge1f的值的所有文档
ntotal = []
for oner in total:
r1f = oner["ROUGE-1 Average_F:"]
if r1f > min_rouge1f:
ntotal.append(oner)
count = len(ntotal)
ans = {'ROUGE-1 Average_R:' :0,'ROUGE-1 Average_F:' :0,'ROUGE-1 Average_P:' :0,\
'ROUGE-2 Average_R:' :0,'ROUGE-2 Average_F:' :0,'ROUGE-2 Average_P:' :0,\
'ROUGE-SU4 Average_R:':0,'ROUGE-SU4 Average_F:':0,'ROUGE-SU4 Average_P:':0,}
print("过滤后文档数",count)
for oner in ntotal:
for rname in ans:
ans[rname] += oner[rname]
for rname in ans:
ans[rname] /= count
pprint(ans)
from optparse import OptionParser
if __name__ == '__main__':
parser = OptionParser()
parser.add_option("-f", "--filter", dest="rouge1",help="rouge1f最小值",default="-1")
(options, args) = parser.parse_args()
print("rouge1最小值",options.rouge1)
main(float(options.rouge1))
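# Example invocation (illustrative only; assumes ROUGE output files are present
# under `ddir`):
#
#   python3 cal_rouge.py -f 0.3
#
# This averages the ROUGE-1/ROUGE-2/ROUGE-SU4 R/P/F scores over every document
# whose "ROUGE-1 Average_F" exceeds 0.3.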
| mit | -1,596,313,816,891,833,000 | 21.9 | 87 | 0.516532 | false |
stryder199/RyarkAssignments | Assignment2/web2py/scripts/contentparser.py | 2 | 4022 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import cStringIO
import re
import sys
import tarfile
import urllib
import xml.parsers.expat as expat
"""
Update script for contenttype.py module.
Usage: python contentupdate.py /path/to/contenttype.py
If no path is specified, script will look for contenttype.py in current
working directory.
Internet connection is required to perform the update.
"""
OVERRIDE = [
('.pdb', 'chemical/x-pdb'),
('.xyz', 'chemical/x-pdb')
]
class MIMEParser(dict):
def __start_element_handler(self, name, attrs):
if name == 'mime-type':
if self.type:
for extension in self.extensions:
self[extension] = self.type
self.type = attrs['type'].lower()
self.extensions = []
elif name == 'glob':
pattern = attrs['pattern']
if pattern.startswith('*.'):
self.extensions.append(pattern[1:].lower())
def __init__(self, fileobj):
dict.__init__(self)
self.type = ''
self.extensions = ''
parser = expat.ParserCreate()
parser.StartElementHandler = self.__start_element_handler
parser.ParseFile(fileobj)
for extension, contenttype in OVERRIDE:
self[extension] = contenttype
if __name__ == '__main__':
try:
path = sys.argv[1]
except:
path = 'contenttype.py'
vregex = re.compile('database version (?P<version>.+?)\.?\n')
sys.stdout.write('Checking contenttype.py database version:')
sys.stdout.flush()
try:
current = open(path).read()
cversion = re.search(vregex, current).group('version')
sys.stdout.write('\t[OK] version %s\n' % cversion)
except Exception, e:
sys.stdout.write('\t[ERROR] %s\n' % e)
exit()
sys.stdout.write('Checking freedesktop.org database version:')
sys.stdout.flush()
try:
search = re.search('(?P<url>http://freedesktop.org/.+?/shared-mime-info-(?P<version>.+?)\.tar\.(?P<type>[gb]z2?))',
urllib.urlopen('http://www.freedesktop.org/wiki/Software/shared-mime-info').read())
url = search.group('url')
assert url != None
nversion = search.group('version')
assert nversion != None
ftype = search.group('type')
assert ftype != None
sys.stdout.write('\t[OK] version %s\n' % nversion)
except:
sys.stdout.write('\t[ERROR] unknown version\n')
exit()
if cversion == nversion:
sys.stdout.write('\nContenttype.py database is up to date\n')
exit()
try:
raw_input('\nContenttype.py database updates are available from:\n%s (approx. 0.5MB)\nPress enter to continue or CTRL-C to quit now\nWARNING: this will replace contenttype.py file content IN PLACE' % url)
except:
exit()
sys.stdout.write('\nDownloading new database:')
sys.stdout.flush()
fregex = re.compile('^.*/freedesktop\.org\.xml$')
try:
io = cStringIO.StringIO()
io.write(urllib.urlopen(url).read())
sys.stdout.write('\t[OK] done\n')
except Exception, e:
sys.stdout.write('\t[ERROR] %s\n' % e)
exit()
sys.stdout.write('Installing new database:')
sys.stdout.flush()
try:
tar = tarfile.TarFile.open(fileobj=io, mode='r:%s' % ftype)
for content in tar.getnames():
if fregex.match(content):
xml = tar.extractfile(content)
break
data = MIMEParser(xml)
io = cStringIO.StringIO()
io.write('CONTENT_TYPE = {\n')
for key in sorted(data):
io.write(' \'%s\': \'%s\',\n' % (key, data[key]))
io.write(' }')
io.seek(0)
open('contenttype.py', 'w').write(re.sub(vregex, 'database version %s.\n' % nversion, re.sub('CONTENT_TYPE = \{(.|\n)+?\}', io.getvalue(), current)))
sys.stdout.write('\t\t\t[OK] done\n')
except Exception, e:
sys.stdout.write('\t\t\t[ERROR] %s\n' % e)
| mit | 62,053,787,865,864,980 | 33.084746 | 212 | 0.582795 | false |
ronekko/chainer | chainer/functions/loss/sigmoid_cross_entropy.py | 2 | 5913 | import numpy
import chainer
from chainer.backends import cuda
from chainer import function_node
from chainer.functions.activation import sigmoid
from chainer import utils
from chainer.utils import type_check
class SigmoidCrossEntropy(function_node.FunctionNode):
"""Sigmoid activation followed by a sigmoid cross entropy loss."""
ignore_label = -1
def __init__(self, normalize=True, reduce='mean'):
self.normalize = normalize
if reduce not in ('mean', 'no'):
raise ValueError(
"only 'mean' and 'no' are valid for 'reduce', but '%s' is "
'given' % reduce)
self.reduce = reduce
self.count = None
def check_type_forward(self, in_types):
type_check.argname(in_types, ('x', 't'))
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype.kind == 'i',
x_type.shape == t_type.shape
)
def forward(self, inputs):
self.retain_inputs((0, 1))
xp = cuda.get_array_module(*inputs)
x, t = inputs
self.ignore_mask = (t != self.ignore_label)
# stable computation of the cross entropy.
loss = -(
self.ignore_mask *
(x * (t - (x >= 0)) - xp.log1p(xp.exp(-xp.abs(x)))))
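# (For t in {0, 1} this is the usual numerically stable form
#  max(x, 0) - x*t + log(1 + exp(-|x|)), multiplied by ignore_mask;
#  exp() is only applied to non-positive arguments, so it cannot overflow.)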
if not self.reduce == 'mean':
return utils.force_array(loss.astype(x.dtype)),
if self.normalize:
count = xp.maximum(1, self.ignore_mask.sum())
else:
count = max(1, len(x))
self.count = count
return utils.force_array(
xp.divide(xp.sum(loss), self.count, dtype=x.dtype)),
def backward(self, inputs, grad_outputs):
x, t = self.get_retained_inputs()
gy, = grad_outputs
gx, = SigmoidCrossEntropyGrad(
self.reduce, self.count, self.ignore_mask, t.data).apply((x, gy))
return gx, None
class SigmoidCrossEntropyGrad(function_node.FunctionNode):
"""Sigmoid cross entropy gradient function."""
def __init__(self, reduce, count, ignore_mask, t):
self.reduce = reduce
self.count = count
self.ignore_mask = ignore_mask
self.t = t
def forward(self, inputs):
self.retain_inputs((0, 1))
xp = cuda.get_array_module(*inputs)
x, gy = inputs
y, = sigmoid.Sigmoid().forward((x,))
if self.reduce == 'mean':
gx = xp.divide(
gy * self.ignore_mask * (y - self.t), self.count,
dtype=y.dtype)
else:
gx = (gy * self.ignore_mask * (y - self.t)).astype(y.dtype)
return gx,
def backward(self, indexes, grad_outputs):
ggx, = grad_outputs
x, gy = self.get_retained_inputs()
y = chainer.functions.sigmoid(x)
yp = y * (1 - y)
gx = yp * chainer.functions.broadcast_to(gy, yp.shape)
ggy = y - self.t.astype(y.dtype)
gx *= self.ignore_mask * ggx
ggy *= self.ignore_mask * ggx
if self.reduce == 'mean':
gx /= self.count
ggy = chainer.functions.sum(ggy) / self.count
return gx, ggy
def sigmoid_cross_entropy(x, t, normalize=True, reduce='mean'):
"""Computes cross entropy loss for pre-sigmoid activations.
Args:
x (:class:`~chainer.Variable` or :class:`numpy.ndarray` or \
:class:`cupy.ndarray`): A variable object holding a matrix whose
(i, j)-th element indicates the unnormalized log probability of
the j-th unit at the i-th example.
t (:class:`~chainer.Variable` or :class:`numpy.ndarray` or \
:class:`cupy.ndarray`): A variable object holding a matrix whose
(i, j)-th element indicates a signed integer vector of
ground truth labels 0 or 1.
If ``t[i, j] == -1``, corresponding ``x[i, j]`` is ignored.
Loss is zero if all ground truth labels are ``-1``.
normalize (bool): Variable holding a boolean value which
determines the normalization constant. If true, this function
normalizes the cross entropy loss across all instances. Otherwise,
it only normalizes along the batch size.
reduce (str): Variable holding a ``str`` which
determines whether to reduce the shape of the input.
If it is ``'mean'``, it computes the sum of cross entropy
and normalize it according to ``normalize`` option.
If it is ``'no'``, this function computes cross entropy for each
instance and does not normalize it (``normalize`` option is
ignored). In this case, the loss value of the ignored instance,
which has ``-1`` as its target value, is set to ``0``.
Returns:
Variable: A variable object holding an array of the cross entropy.
If ``reduce`` is ``'mean'``, it is a scalar array.
If ``reduce`` is ``'no'``, the shape is same as ``x``.
.. note::
This function is differentiable only by ``x``.
.. admonition:: Example
>>> x = np.array([[-2.0, 3.0, 0.5], [5.0, 2.0, -0.5]]).\
astype(np.float32)
>>> x
array([[-2. , 3. , 0.5],
[ 5. , 2. , -0.5]], dtype=float32)
>>> t = np.array([[0, 1, 0], [1, 1, -1]]).astype(np.int32)
>>> t
array([[ 0, 1, 0],
[ 1, 1, -1]], dtype=int32)
>>> F.sigmoid_cross_entropy(x, t)
variable(0.25664714)
>>> F.sigmoid_cross_entropy(x, t, normalize=False)
variable(0.64161783)
>>> y = F.sigmoid_cross_entropy(x, t, reduce='no')
>>> y.shape
(2, 3)
>>> y.data
array([[ 0.126928 , 0.04858735, 0.974077 ],
[ 0.00671535, 0.126928 , -0. ]], dtype=float32)
"""
return SigmoidCrossEntropy(normalize, reduce).apply((x, t))[0]
| mit | 4,776,691,069,655,738,000 | 33.782353 | 77 | 0.560122 | false |
desirable-objects/hotwire-shell | hotwire/builtins/sechash.py | 4 | 2227 | # This file is part of the Hotwire Shell project API.
# Copyright (C) 2007 Colin Walters <[email protected]>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE X CONSORTIUM BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
# THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os,sys,md5,sha
import hotwire
from hotwire.builtin import builtin_hotwire, InputStreamSchema
from hotwire.fs import FilePath
from hotwire.sysdep.fs import Filesystem
@builtin_hotwire(idempotent=True,
input=InputStreamSchema('any', optional=True),
output=str,
options=[['-5', '--md5'],])
def sechash(context, *files):
_("""Create a secure hash (default SHA1) from objects or file arguments.""")
alg = ('-5' in context.options) and md5 or sha
fs = Filesystem.getInstance()
if (not files) and context.input:
for val in context.input:
valstr = str(val)
hashval = alg.new()
hashval.update(valstr)
yield hashval.hexdigest()
for arg in files:
fpath = FilePath(arg, context.cwd)
stream = open(fpath)
hashval = alg.new()
buf = stream.read(4096)
while buf:
hashval.update(buf)
buf = stream.read(4096)
stream.close()
yield hashval.hexdigest()
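# Illustrative Hotwire invocations (hypothetical session, not part of this module):
#   sechash somefile.txt       # SHA1 digest of a file argument
#   sechash -5 somefile.txt    # MD5 digest instead
# Piped input is also accepted: each incoming object's string form is hashed.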
| gpl-2.0 | -7,527,450,221,668,916,000 | 41.826923 | 86 | 0.684329 | false |
bwhmather/verktyg | verktyg/testsuite/test_dispatch.py | 1 | 4486 | """
verktyg.testsuite.dispatch
~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests for the core dispatcher.
:copyright: (c) 2014 by Ben Mather.
:license: BSD, see LICENSE for more details.
"""
import unittest
from verktyg.exceptions import NotImplemented, MethodNotAllowed, NotAcceptable
from verktyg.dispatch import Binding, Dispatcher
def _make_view(value):
def view(app, req):
return value
return view
class DispatchTestCase(unittest.TestCase):
def test_name_dispatch(self):
dispatcher = Dispatcher([
Binding('tweedle-dum', _make_view('Tweedle Dum')),
Binding('tweedle-dee', _make_view('Tweedle Dee')),
Binding('same', _make_view('overridden')),
Binding('same', _make_view('overriding')),
])
self.assertEqual(
'Tweedle Dum', dispatcher.lookup('tweedle-dum')(None, None)
)
self.assertEqual(
'Tweedle Dee', dispatcher.lookup('tweedle-dee')(None, None)
)
self.assertRaises(NotImplemented, dispatcher.lookup, 'non-existant')
self.assertEqual(
'overriding', dispatcher.lookup('same')(None, None)
)
def test_method_dispatch(self):
dispatcher = Dispatcher([
Binding('test', _make_view('get'), method='GET'),
Binding('test', _make_view('post'), method='POST'),
Binding('head', _make_view('head'), method='HEAD'),
Binding('no-head', _make_view('get'), method='GET'),
])
# default to 'GET'
self.assertEqual(
'get', dispatcher.lookup('test')(None, None)
)
self.assertEqual(
'get', dispatcher.lookup('test', method='GET')(None, None)
)
# `POST` gives something different
self.assertEqual(
'post', dispatcher.lookup('test', method='POST')(None, None)
)
# `PUT` not found
self.assertRaises(
MethodNotAllowed, dispatcher.lookup, 'test', method='PUT'
)
def test_head_fallback(self):
dispatcher = Dispatcher([
Binding('head', _make_view('head'), method='HEAD'),
Binding('no-head', _make_view('get'), method='GET'),
])
# `HEAD` should fall back to `GET`
self.assertEqual(
'head', dispatcher.lookup('head', method='HEAD')(None, None)
)
self.assertEqual(
'get', dispatcher.lookup('no-head', method='HEAD')(None, None)
)
def test_method_override(self):
dispatcher = Dispatcher([
Binding('same', _make_view('overridden')),
Binding('same', _make_view('unaffected'), method='POST'),
Binding('same', _make_view('overriding')),
])
# replacing handler for one method should not affect others
self.assertEqual(
'overriding', dispatcher.lookup('same')(None, None)
)
self.assertEqual(
'unaffected', dispatcher.lookup('same', method='POST')(None, None)
)
def test_accept_dispatch(self):
dispatcher = Dispatcher([
Binding(
'test', _make_view('json'),
content_type='application/json'
),
Binding(
'test', _make_view('html'),
content_type='text/html'
),
Binding(
'test', _make_view('whatever')
),
Binding(
'nope', _make_view('nope'),
content_type='application/xml'
),
])
# accept header strings
self.assertEqual(
'json',
dispatcher.lookup('test', accept='application/json')(None, None)
)
self.assertEqual(
'json',
dispatcher.lookup(
'test', accept='application/json; q=0.9, text/html; q=0.8'
)(None, None)
)
self.assertEqual(
'whatever',
dispatcher.lookup('test', accept='application/xml')(None, None)
)
self.assertRaises(
NotAcceptable,
dispatcher.lookup, 'nope', accept='text/html'
)
def test_nested(self):
child = Dispatcher([
Binding('nested', _make_view('Nested')),
])
parent = Dispatcher([
child,
])
self.assertEqual(
'Nested',
parent.lookup('nested')(None, None))
| bsd-3-clause | -4,829,780,922,076,107,000 | 29.310811 | 78 | 0.525858 | false |
tbenthompson/rupturotops | source/linear_viscoelastic/simple_elastic.py | 1 | 1904 | import numpy as np
from core.debug import _DEBUG
from matplotlib import pyplot as pyp
def elastic_stress(x, y, s, D, shear_modulus):
"""
Use the elastic half-space stress solution from Segall (2010)
"""
# TEST THIS!
factor = (s * shear_modulus) / (2 * np.pi)
main_term = -(y - D) / ((y - D) ** 2 + x ** 2)
image_term = (y + D) / ((y + D) ** 2 + x ** 2)
Szx = factor * (main_term + image_term)
main_term = x / (x ** 2 + (y - D) ** 2)
image_term = -x / (x ** 2 + (y + D) ** 2)
Szy = factor * (main_term + image_term)
return Szx, Szy
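# Written out (transcribed from the expressions above, with mu = shear_modulus):
#   Szx = (mu*s/(2*pi)) * ( -(y - D)/(x**2 + (y - D)**2) + (y + D)/(x**2 + (y + D)**2) )
#   Szy = (mu*s/(2*pi)) * (  x/(x**2 + (y - D)**2) - x/(x**2 + (y + D)**2) )
# The second term in each bracket is the image source at depth -D, which makes
# Szy vanish on the free surface y = 0.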
def calc_divergence(f, g, dx, dy):
"""
Calculate the discrete divergence of a 2D vector field.
This should be moved to some other location.
"""
dfdx = (np.roll(f, -1, 1) - np.roll(f, 1, 1)) / (2 * dx)
dgdy = (np.roll(g, -1, 0) - np.roll(g, 1, 0)) / (2 * dy)
return (dfdx + dgdy)[1:-1, 1:-1]
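# (np.roll provides periodic wrap-around at the array edges; the [1:-1, 1:-1]
#  slice then drops the boundary rows/columns, so only interior central
#  differences (f[i, j+1] - f[i, j-1])/(2*dx) + (g[i+1, j] - g[i-1, j])/(2*dy)
#  are returned.)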
def calc_curl(f, dx, dy):
"""
Calculates the discrete curl of a vector field with no component
in x and y.
"""
dfdx = (np.roll(f, -1, 1) - np.roll(f, 1, 1)) / (2 * dx)
dfdy = (np.roll(f, -1, 0) - np.roll(f, 1, 0)) / (2 * dy)
return dfdy[1:-1, 1:-1], -dfdx[1:-1, 1:-1]
def test_div_curl():
# The curl of anything should be divergence free
z = np.random.random_sample((50, 50))
curl = calc_curl(z, 1.0, 1.0)
div = calc_divergence(curl[0], curl[1], 1.0, 1.0)
assert(np.mean(np.abs(div)) < np.mean(np.abs(curl[0])) / 1000.0)
def test_elastic_stress():
# Here, I just test that its divergence free.
# Incomplete test, but better than nothing.
x = np.linspace(1, 10, 40)
y = np.linspace(0, 9, 40)
x, y = np.meshgrid(x, y)
s = 1.0
D = 10.0
mu = 1000000
f = elastic_stress(x, y, s, D, mu)
divf = calc_divergence(f[0], f[1], 9.0 / 40.0, 9.0 / 40.0)
assert(np.mean(np.abs(divf)) < np.mean(np.abs(f[0])) / 1000.0)
| lgpl-3.0 | 4,818,788,601,795,321,000 | 30.213115 | 68 | 0.544643 | false |
dongjoon-hyun/tensorflow | tensorflow/python/kernel_tests/self_adjoint_eig_op_test.py | 3 | 9513 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.math_ops.matrix_inverse."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import test
def _AddTest(test_class, op_name, testcase_name, fn):
test_name = "_".join(["test", op_name, testcase_name])
if hasattr(test_class, test_name):
raise RuntimeError("Test %s defined more than once" % test_name)
setattr(test_class, test_name, fn)
class SelfAdjointEigTest(test.TestCase):
def testWrongDimensions(self):
# The input to self_adjoint_eig should be a tensor of
# at least rank 2.
scalar = constant_op.constant(1.)
with self.assertRaises(ValueError):
linalg_ops.self_adjoint_eig(scalar)
vector = constant_op.constant([1., 2.])
with self.assertRaises(ValueError):
linalg_ops.self_adjoint_eig(vector)
def testConcurrentExecutesWithoutError(self):
all_ops = []
with self.session(use_gpu=True) as sess:
for compute_v_ in True, False:
matrix1 = random_ops.random_normal([5, 5], seed=42)
matrix2 = random_ops.random_normal([5, 5], seed=42)
if compute_v_:
e1, v1 = linalg_ops.self_adjoint_eig(matrix1)
e2, v2 = linalg_ops.self_adjoint_eig(matrix2)
all_ops += [e1, v1, e2, v2]
else:
e1 = linalg_ops.self_adjoint_eigvals(matrix1)
e2 = linalg_ops.self_adjoint_eigvals(matrix2)
all_ops += [e1, e2]
val = sess.run(all_ops)
self.assertAllEqual(val[0], val[2])
# The algorithm is slightly different for compute_v being True and False,
# so require approximate equality only here.
self.assertAllClose(val[2], val[4])
self.assertAllEqual(val[4], val[5])
self.assertAllEqual(val[1], val[3])
def testMatrixThatFailsWhenFlushingDenormsToZero(self):
# Test a 32x32 matrix which is known to fail if denorm floats are flushed to
# zero.
matrix = np.genfromtxt(
test.test_src_dir_path(
"python/kernel_tests/testdata/"
"self_adjoint_eig_fail_if_denorms_flushed.txt")).astype(np.float32)
self.assertEqual(matrix.shape, (32, 32))
matrix_tensor = constant_op.constant(matrix)
with self.session(use_gpu=True) as sess:
(e, v) = sess.run(linalg_ops.self_adjoint_eig(matrix_tensor))
self.assertEqual(e.size, 32)
self.assertAllClose(
np.matmul(v, v.transpose()), np.eye(32, dtype=np.float32), atol=2e-3)
self.assertAllClose(matrix,
np.matmul(np.matmul(v, np.diag(e)), v.transpose()))
def SortEigenDecomposition(e, v):
if v.ndim < 2:
return e, v
else:
perm = np.argsort(e, -1)
return np.take(e, perm, -1), np.take(v, perm, -1)
def EquilibrateEigenVectorPhases(x, y):
"""Equilibrate the phase of the Eigenvectors in the columns of `x` and `y`.
Eigenvectors are only unique up to an arbitrary phase. This function rotates x
such that it matches y. Precondition: The columns of x and y differ by a
multiplicative complex phase factor only.
Args:
x: `np.ndarray` with Eigenvectors
y: `np.ndarray` with Eigenvectors
Returns:
`np.ndarray` containing an equilibrated version of x.
"""
phases = np.sum(np.conj(x) * y, -2, keepdims=True)
phases /= np.abs(phases)
return phases * x
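# (sum(conj(x) * y) over axis -2 is the per-column inner product <x_k, y_k>;
#  dividing by its modulus leaves a unit-modulus complex factor that rotates
#  the phase of each column of x onto the phase of the matching column of y.)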
def _GetSelfAdjointEigTest(dtype_, shape_, compute_v_):
def CompareEigenVectors(self, x, y, tol):
x = EquilibrateEigenVectorPhases(x, y)
self.assertAllClose(x, y, atol=tol)
def CompareEigenDecompositions(self, x_e, x_v, y_e, y_v, tol):
num_batches = int(np.prod(x_e.shape[:-1]))
n = x_e.shape[-1]
x_e = np.reshape(x_e, [num_batches] + [n])
x_v = np.reshape(x_v, [num_batches] + [n, n])
y_e = np.reshape(y_e, [num_batches] + [n])
y_v = np.reshape(y_v, [num_batches] + [n, n])
for i in range(num_batches):
x_ei, x_vi = SortEigenDecomposition(x_e[i, :], x_v[i, :, :])
y_ei, y_vi = SortEigenDecomposition(y_e[i, :], y_v[i, :, :])
self.assertAllClose(x_ei, y_ei, atol=tol, rtol=tol)
CompareEigenVectors(self, x_vi, y_vi, tol)
def Test(self):
np.random.seed(1)
n = shape_[-1]
batch_shape = shape_[:-2]
np_dtype = dtype_.as_numpy_dtype
a = np.random.uniform(
low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
if dtype_.is_complex:
a += 1j * np.random.uniform(
low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
a += np.conj(a.T)
a = np.tile(a, batch_shape + (1, 1))
if dtype_ in (dtypes_lib.float32, dtypes_lib.complex64):
atol = 1e-4
else:
atol = 1e-12
np_e, np_v = np.linalg.eigh(a)
with self.session(use_gpu=True):
if compute_v_:
tf_e, tf_v = linalg_ops.self_adjoint_eig(constant_op.constant(a))
# Check that V*diag(E)*V^T is close to A.
a_ev = math_ops.matmul(
math_ops.matmul(tf_v, array_ops.matrix_diag(tf_e)),
tf_v,
adjoint_b=True)
self.assertAllClose(a_ev.eval(), a, atol=atol)
# Compare to numpy.linalg.eigh.
CompareEigenDecompositions(self, np_e, np_v,
tf_e.eval(), tf_v.eval(), atol)
else:
tf_e = linalg_ops.self_adjoint_eigvals(constant_op.constant(a))
self.assertAllClose(
np.sort(np_e, -1), np.sort(tf_e.eval(), -1), atol=atol)
return Test
class SelfAdjointEigGradTest(test.TestCase):
pass # Filled in below
def _GetSelfAdjointEigGradTest(dtype_, shape_, compute_v_):
def Test(self):
np.random.seed(1)
n = shape_[-1]
batch_shape = shape_[:-2]
np_dtype = dtype_.as_numpy_dtype
a = np.random.uniform(
low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
if dtype_.is_complex:
a += 1j * np.random.uniform(
low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
a += np.conj(a.T)
a = np.tile(a, batch_shape + (1, 1))
# Optimal stepsize for central difference is O(epsilon^{1/3}).
epsilon = np.finfo(np_dtype).eps
delta = 0.1 * epsilon**(1.0 / 3.0)
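# (Central differences have O(delta**2) truncation error and O(eps/delta)
#  rounding error; balancing the two terms gives delta ~ eps**(1/3).)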
# tolerance obtained by looking at actual differences using
# np.linalg.norm(theoretical-numerical, np.inf) on -mavx build
if dtype_ in (dtypes_lib.float32, dtypes_lib.complex64):
tol = 1e-2
else:
tol = 1e-7
with self.session(use_gpu=True):
tf_a = constant_op.constant(a)
if compute_v_:
tf_e, tf_v = linalg_ops.self_adjoint_eig(tf_a)
# (complex) Eigenvectors are only unique up to an arbitrary phase
# We normalize the vectors such that the first component has phase 0.
top_rows = tf_v[..., 0:1, :]
if tf_a.dtype.is_complex:
angle = -math_ops.angle(top_rows)
phase = math_ops.complex(math_ops.cos(angle), math_ops.sin(angle))
else:
phase = math_ops.sign(top_rows)
tf_v *= phase
outputs = [tf_e, tf_v]
else:
tf_e = linalg_ops.self_adjoint_eigvals(tf_a)
outputs = [tf_e]
for b in outputs:
x_init = np.random.uniform(
low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
if dtype_.is_complex:
x_init += 1j * np.random.uniform(
low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
x_init += np.conj(x_init.T)
x_init = np.tile(x_init, batch_shape + (1, 1))
theoretical, numerical = gradient_checker.compute_gradient(
tf_a,
tf_a.get_shape().as_list(),
b,
b.get_shape().as_list(),
x_init_value=x_init,
delta=delta)
self.assertAllClose(theoretical, numerical, atol=tol, rtol=tol)
return Test
if __name__ == "__main__":
for compute_v in True, False:
for dtype in (dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.complex64,
dtypes_lib.complex128):
for size in 1, 2, 5, 10:
for batch_dims in [(), (3,)] + [(3, 2)] * (max(size, size) < 10):
shape = batch_dims + (size, size)
name = "%s_%s_%s" % (dtype, "_".join(map(str, shape)), compute_v)
_AddTest(SelfAdjointEigTest, "SelfAdjointEig", name,
_GetSelfAdjointEigTest(dtype, shape, compute_v))
_AddTest(SelfAdjointEigGradTest, "SelfAdjointEigGrad", name,
_GetSelfAdjointEigGradTest(dtype, shape, compute_v))
test.main()
| apache-2.0 | 5,716,619,083,425,049,000 | 36.600791 | 80 | 0.618837 | false |
neumerance/cloudloon2 | .venv/lib/python2.7/site-packages/cmd2.py | 5 | 64971 | """Variant on standard library's cmd with extra features.
To use, simply import cmd2.Cmd instead of cmd.Cmd; use precisely as though you
were using the standard library's cmd, while enjoying the extra features.
Searchable command history (commands: "hi", "li", "run")
Load commands from file, save to file, edit commands in file
Multi-line commands
Case-insensitive commands
Special-character shortcut commands (beyond cmd's "@" and "!")
Settable environment parameters
Optional _onchange_{paramname} called when environment parameter changes
Parsing commands with `optparse` options (flags)
Redirection to file with >, >>; input from file with <
Easy transcript-based testing of applications (see example/example.py)
Bash-style ``select`` available
Note that redirection with > and | will only work if `self.stdout.write()`
is used in place of `print`. The standard library's `cmd` module is
written to use `self.stdout.write()`.
- Catherine Devlin, Jan 03 2008 - catherinedevlin.blogspot.com
mercurial repository at http://www.assembla.com/wiki/show/python-cmd2
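A minimal usage sketch (illustrative; the application name and command below
are made up, everything else is the cmd2 API defined in this module):

    from cmd2 import Cmd, options, make_option

    class HelloApp(Cmd):
        '''Hypothetical application with one option-parsing command.'''
        @options([make_option('-n', '--name', default='world',
                              help="who to greet")])
        def do_hello(self, arg, opts):
            self.poutput('Hello, %s!' % opts.name)

    if __name__ == '__main__':
        HelloApp().cmdloop()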
"""
import cmd
import re
import os
import sys
import optparse
import subprocess
import tempfile
import doctest
import unittest
import datetime
import urllib
import glob
import traceback
import platform
import copy
from code import InteractiveConsole, InteractiveInterpreter
from optparse import make_option
import pyparsing
__version__ = '0.6.6.1'
if sys.version_info[0] == 2:
pyparsing.ParserElement.enablePackrat()
"""
Packrat is causing Python3 errors that I don't understand.
> /usr/local/Cellar/python3/3.2/lib/python3.2/site-packages/pyparsing-1.5.6-py3.2.egg/pyparsing.py(999)scanString()
-> nextLoc,tokens = parseFn( instring, preloc, callPreParse=False )
(Pdb) n
NameError: global name 'exc' is not defined
(Pdb) parseFn
<bound method Or._parseCache of {Python style comment ^ C style comment}>
Bug report filed: https://sourceforge.net/tracker/?func=detail&atid=617311&aid=3381439&group_id=97203
"""
class OptionParser(optparse.OptionParser):
def exit(self, status=0, msg=None):
self.values._exit = True
if msg:
print (msg)
def print_help(self, *args, **kwargs):
try:
print (self._func.__doc__)
except AttributeError:
pass
optparse.OptionParser.print_help(self, *args, **kwargs)
def error(self, msg):
"""error(msg : string)
Print a usage message incorporating 'msg' to stderr and exit.
If you override this in a subclass, it should not return -- it
should either exit or raise an exception.
"""
raise optparse.OptParseError(msg)
def remaining_args(oldArgs, newArgList):
'''
Preserves the spacing originally in the argument after
the removal of options.
>>> remaining_args('-f bar bar cow', ['bar', 'cow'])
'bar cow'
'''
pattern = '\s+'.join(re.escape(a) for a in newArgList) + '\s*$'
matchObj = re.search(pattern, oldArgs)
return oldArgs[matchObj.start():]
def _attr_get_(obj, attr):
'''Returns an attribute's value, or None (no error) if undefined.
Analogous to .get() for dictionaries. Useful when checking for
value of options that may not have been defined on a given
method.'''
try:
return getattr(obj, attr)
except AttributeError:
return None
optparse.Values.get = _attr_get_
options_defined = [] # used to distinguish --options from SQL-style --comments
def options(option_list, arg_desc="arg"):
'''Used as a decorator and passed a list of optparse-style options,
alters a cmd2 method to populate its ``opts`` argument from its
raw text argument.
Example: transform
def do_something(self, arg):
into
@options([make_option('-q', '--quick', action="store_true",
help="Makes things fast")],
"source dest")
def do_something(self, arg, opts):
if opts.quick:
self.fast_button = True
'''
if not isinstance(option_list, list):
option_list = [option_list]
for opt in option_list:
options_defined.append(pyparsing.Literal(opt.get_opt_string()))
def option_setup(func):
optionParser = OptionParser()
for opt in option_list:
optionParser.add_option(opt)
optionParser.set_usage("%s [options] %s" % (func.__name__[3:], arg_desc))
optionParser._func = func
def new_func(instance, arg):
try:
opts, newArgList = optionParser.parse_args(arg.split())
# Must find the remaining args in the original argument list, but
# mustn't include the command itself
#if hasattr(arg, 'parsed') and newArgList[0] == arg.parsed.command:
# newArgList = newArgList[1:]
newArgs = remaining_args(arg, newArgList)
if isinstance(arg, ParsedString):
arg = arg.with_args_replaced(newArgs)
else:
arg = newArgs
except optparse.OptParseError as e:
print (e)
optionParser.print_help()
return
if hasattr(opts, '_exit'):
return None
result = func(instance, arg, opts)
return result
new_func.__doc__ = '%s\n%s' % (func.__doc__, optionParser.format_help())
return new_func
return option_setup
class PasteBufferError(EnvironmentError):
if sys.platform[:3] == 'win':
errmsg = """Redirecting to or from paste buffer requires pywin32
to be installed on operating system.
Download from http://sourceforge.net/projects/pywin32/"""
elif sys.platform[:3] == 'dar':
# Use built in pbcopy on Mac OSX
pass
else:
errmsg = """Redirecting to or from paste buffer requires xclip
to be installed on operating system.
On Debian/Ubuntu, 'sudo apt-get install xclip' will install it."""
def __init__(self):
Exception.__init__(self, self.errmsg)
pastebufferr = """Redirecting to or from paste buffer requires %s
to be installed on operating system.
%s"""
if subprocess.mswindows:
try:
import win32clipboard
def get_paste_buffer():
win32clipboard.OpenClipboard(0)
try:
result = win32clipboard.GetClipboardData()
except TypeError:
result = '' #non-text
win32clipboard.CloseClipboard()
return result
def write_to_paste_buffer(txt):
win32clipboard.OpenClipboard(0)
win32clipboard.EmptyClipboard()
win32clipboard.SetClipboardText(txt)
win32clipboard.CloseClipboard()
except ImportError:
def get_paste_buffer(*args):
raise OSError(pastebufferr % ('pywin32', 'Download from http://sourceforge.net/projects/pywin32/'))
write_to_paste_buffer = get_paste_buffer
elif sys.platform == 'darwin':
can_clip = False
try:
# test for pbcopy - AFAIK, should always be installed on MacOS
subprocess.check_call('pbcopy -help', shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
can_clip = True
except (subprocess.CalledProcessError, OSError, IOError):
pass
if can_clip:
def get_paste_buffer():
pbcopyproc = subprocess.Popen('pbcopy -help', shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
return pbcopyproc.stdout.read()
def write_to_paste_buffer(txt):
pbcopyproc = subprocess.Popen('pbcopy', shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
pbcopyproc.communicate(txt.encode())
else:
def get_paste_buffer(*args):
raise OSError(pastebufferr % ('pbcopy', 'On MacOS X - error should not occur - part of the default installation'))
write_to_paste_buffer = get_paste_buffer
else:
can_clip = False
try:
subprocess.check_call('xclip -o -sel clip', shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
can_clip = True
except AttributeError: # check_call not defined, Python < 2.5
try:
teststring = 'Testing for presence of xclip.'
xclipproc = subprocess.Popen('xclip -sel clip', shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
xclipproc.stdin.write(teststring)
xclipproc.stdin.close()
xclipproc = subprocess.Popen('xclip -o -sel clip', shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
if xclipproc.stdout.read() == teststring:
can_clip = True
except Exception: # hate a bare Exception call, but exception classes vary too much b/t stdlib versions
pass
except Exception:
pass # something went wrong with xclip and we cannot use it
if can_clip:
def get_paste_buffer():
xclipproc = subprocess.Popen('xclip -o -sel clip', shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
return xclipproc.stdout.read()
def write_to_paste_buffer(txt):
xclipproc = subprocess.Popen('xclip -sel clip', shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
xclipproc.stdin.write(txt.encode())
xclipproc.stdin.close()
# but we want it in both the "primary" and "mouse" clipboards
xclipproc = subprocess.Popen('xclip', shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
xclipproc.stdin.write(txt.encode())
xclipproc.stdin.close()
else:
def get_paste_buffer(*args):
raise OSError(pastebufferr % ('xclip', 'On Debian/Ubuntu, install with "sudo apt-get install xclip"'))
write_to_paste_buffer = get_paste_buffer
pyparsing.ParserElement.setDefaultWhitespaceChars(' \t')
class ParsedString(str):
def full_parsed_statement(self):
new = ParsedString('%s %s' % (self.parsed.command, self.parsed.args))
new.parsed = self.parsed
new.parser = self.parser
return new
def with_args_replaced(self, newargs):
new = ParsedString(newargs)
new.parsed = self.parsed
new.parser = self.parser
new.parsed['args'] = newargs
new.parsed.statement['args'] = newargs
return new
class StubbornDict(dict):
'''Dictionary that tolerates many input formats.
Create it with stubbornDict(arg) factory function.
>>> d = StubbornDict(large='gross', small='klein')
>>> sorted(d.items())
[('large', 'gross'), ('small', 'klein')]
>>> d.append(['plain', ' plaid'])
>>> sorted(d.items())
[('large', 'gross'), ('plaid', ''), ('plain', ''), ('small', 'klein')]
>>> d += ' girl Frauelein, Maedchen\\n\\n shoe schuh'
>>> sorted(d.items())
[('girl', 'Frauelein, Maedchen'), ('large', 'gross'), ('plaid', ''), ('plain', ''), ('shoe', 'schuh'), ('small', 'klein')]
'''
def update(self, arg):
dict.update(self, StubbornDict.to_dict(arg))
append = update
def __iadd__(self, arg):
self.update(arg)
return self
def __add__(self, arg):
selfcopy = copy.copy(self)
selfcopy.update(stubbornDict(arg))
return selfcopy
def __radd__(self, arg):
selfcopy = copy.copy(self)
selfcopy.update(stubbornDict(arg))
return selfcopy
@classmethod
def to_dict(cls, arg):
'Generates dictionary from string or list of strings'
if hasattr(arg, 'splitlines'):
arg = arg.splitlines()
if hasattr(arg, '__reversed__'):
result = {}
for a in arg:
a = a.strip()
if a:
key_val = a.split(None, 1)
key = key_val[0]
if len(key_val) > 1:
val = key_val[1]
else:
val = ''
result[key] = val
else:
result = arg
return result
def stubbornDict(*arg, **kwarg):
'''
>>> sorted(stubbornDict('cow a bovine\\nhorse an equine').items())
[('cow', 'a bovine'), ('horse', 'an equine')]
>>> sorted(stubbornDict(['badger', 'porcupine a poky creature']).items())
[('badger', ''), ('porcupine', 'a poky creature')]
>>> sorted(stubbornDict(turtle='has shell', frog='jumpy').items())
[('frog', 'jumpy'), ('turtle', 'has shell')]
'''
result = {}
for a in arg:
result.update(StubbornDict.to_dict(a))
result.update(kwarg)
return StubbornDict(result)
def replace_with_file_contents(fname):
if fname:
try:
result = open(os.path.expanduser(fname[0])).read()
except IOError:
result = '< %s' % fname[0] # wasn't a file after all
else:
result = get_paste_buffer()
return result
class EmbeddedConsoleExit(SystemExit):
pass
class EmptyStatement(Exception):
pass
def ljust(x, width, fillchar=' '):
'analogous to str.ljust, but works for lists'
if hasattr(x, 'ljust'):
return x.ljust(width, fillchar)
else:
if len(x) < width:
x = (x + [fillchar] * width)[:width]
return x
class Cmd(cmd.Cmd):
echo = False
case_insensitive = True # Commands recognized regardless of case
continuation_prompt = '> '
timing = False # Prints elapsed time for each command
# make sure your terminators are not in legalChars!
legalChars = u'!#$%.:?@_' + pyparsing.alphanums + pyparsing.alphas8bit
shortcuts = {'?': 'help', '!': 'shell', '@': 'load', '@@': '_relative_load'}
excludeFromHistory = '''run r list l history hi ed edit li eof'''.split()
default_to_shell = False
noSpecialParse = 'set ed edit exit'.split()
defaultExtension = 'txt' # For ``save``, ``load``, etc.
default_file_name = 'command.txt' # For ``save``, ``load``, etc.
abbrev = True # Abbreviated commands recognized
current_script_dir = None
reserved_words = []
feedback_to_output = False # Do include nonessentials in >, | output
quiet = False # Do not suppress nonessential output
debug = False
locals_in_py = True
kept_state = None
redirector = '>' # for sending output to file
settable = stubbornDict('''
prompt
colors Colorized output (*nix only)
continuation_prompt On 2nd+ line of input
debug Show full error stack on error
default_file_name for ``save``, ``load``, etc.
editor Program used by ``edit``
case_insensitive upper- and lower-case both OK
feedback_to_output include nonessentials in `|`, `>` results
quiet Don't print nonessential feedback
echo Echo command issued into output
timing Report execution times
abbrev Accept abbreviated commands
''')
def poutput(self, msg):
'''Convenient shortcut for self.stdout.write(); adds newline if necessary.'''
if msg:
self.stdout.write(msg)
if msg[-1] != '\n':
self.stdout.write('\n')
def perror(self, errmsg, statement=None):
if self.debug:
traceback.print_exc()
print (str(errmsg))
def pfeedback(self, msg):
"""For printing nonessential feedback. Can be silenced with `quiet`.
Inclusion in redirected output is controlled by `feedback_to_output`."""
if not self.quiet:
if self.feedback_to_output:
self.poutput(msg)
else:
print (msg)
_STOP_AND_EXIT = True # distinguish end of script file from actual exit
_STOP_SCRIPT_NO_EXIT = -999
editor = os.environ.get('EDITOR')
if not editor:
if sys.platform[:3] == 'win':
editor = 'notepad'
else:
for editor in ['gedit', 'kate', 'vim', 'vi', 'emacs', 'nano', 'pico']:
if subprocess.Popen(['which', editor], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]:
break
colorcodes = {'bold':{True:'\x1b[1m',False:'\x1b[22m'},
'cyan':{True:'\x1b[36m',False:'\x1b[39m'},
'blue':{True:'\x1b[34m',False:'\x1b[39m'},
'red':{True:'\x1b[31m',False:'\x1b[39m'},
'magenta':{True:'\x1b[35m',False:'\x1b[39m'},
'green':{True:'\x1b[32m',False:'\x1b[39m'},
'underline':{True:'\x1b[4m',False:'\x1b[24m'}}
colors = (platform.system() != 'Windows')
def colorize(self, val, color):
'''Given a string (``val``), returns that string wrapped in UNIX-style
special characters that turn on (and then off) text color and style.
If the ``colors`` environment parameter is ``False``, or the application
is running on Windows, will return ``val`` unchanged.
``color`` should be one of the supported strings (or styles):
red/blue/green/cyan/magenta, bold, underline'''
if self.colors and (self.stdout == self.initial_stdout):
return self.colorcodes[color][True] + val + self.colorcodes[color][False]
return val
def do_cmdenvironment(self, args):
'''Summary report of interactive parameters.'''
self.stdout.write("""
Commands are %(casesensitive)scase-sensitive.
Commands may be terminated with: %(terminators)s
Settable parameters: %(settable)s\n""" % \
{ 'casesensitive': (self.case_insensitive and 'not ') or '',
'terminators': str(self.terminators),
'settable': ' '.join(self.settable)
})
def do_help(self, arg):
if arg:
funcname = self.func_named(arg)
if funcname:
fn = getattr(self, funcname)
try:
fn.optionParser.print_help(file=self.stdout)
except AttributeError:
cmd.Cmd.do_help(self, funcname[3:])
else:
cmd.Cmd.do_help(self, arg)
def __init__(self, *args, **kwargs):
cmd.Cmd.__init__(self, *args, **kwargs)
self.initial_stdout = sys.stdout
self.history = History()
self.pystate = {}
self.shortcuts = sorted(self.shortcuts.items(), reverse=True)
self.keywords = self.reserved_words + [fname[3:] for fname in dir(self)
if fname.startswith('do_')]
self._init_parser()
def do_shortcuts(self, args):
"""Lists single-key shortcuts available."""
result = "\n".join('%s: %s' % (sc[0], sc[1]) for sc in sorted(self.shortcuts))
self.stdout.write("Single-key shortcuts for other commands:\n%s\n" % (result))
prefixParser = pyparsing.Empty()
commentGrammars = pyparsing.Or([pyparsing.pythonStyleComment, pyparsing.cStyleComment])
commentGrammars.addParseAction(lambda x: '')
commentInProgress = pyparsing.Literal('/*') + pyparsing.SkipTo(
pyparsing.stringEnd ^ '*/')
terminators = [';']
blankLinesAllowed = False
multilineCommands = []
def _init_parser(self):
r'''
>>> c = Cmd()
>>> c.multilineCommands = ['multiline']
>>> c.case_insensitive = True
>>> c._init_parser()
>>> print (c.parser.parseString('').dump())
[]
>>> print (c.parser.parseString('').dump())
[]
>>> print (c.parser.parseString('/* empty command */').dump())
[]
>>> print (c.parser.parseString('plainword').dump())
['plainword', '']
- command: plainword
- statement: ['plainword', '']
- command: plainword
>>> print (c.parser.parseString('termbare;').dump())
['termbare', '', ';', '']
- command: termbare
- statement: ['termbare', '', ';']
- command: termbare
- terminator: ;
- terminator: ;
>>> print (c.parser.parseString('termbare; suffx').dump())
['termbare', '', ';', 'suffx']
- command: termbare
- statement: ['termbare', '', ';']
- command: termbare
- terminator: ;
- suffix: suffx
- terminator: ;
>>> print (c.parser.parseString('barecommand').dump())
['barecommand', '']
- command: barecommand
- statement: ['barecommand', '']
- command: barecommand
>>> print (c.parser.parseString('COMmand with args').dump())
['command', 'with args']
- args: with args
- command: command
- statement: ['command', 'with args']
- args: with args
- command: command
>>> print (c.parser.parseString('command with args and terminator; and suffix').dump())
['command', 'with args and terminator', ';', 'and suffix']
- args: with args and terminator
- command: command
- statement: ['command', 'with args and terminator', ';']
- args: with args and terminator
- command: command
- terminator: ;
- suffix: and suffix
- terminator: ;
>>> print (c.parser.parseString('simple | piped').dump())
['simple', '', '|', ' piped']
- command: simple
- pipeTo: piped
- statement: ['simple', '']
- command: simple
>>> print (c.parser.parseString('double-pipe || is not a pipe').dump())
['double', '-pipe || is not a pipe']
- args: -pipe || is not a pipe
- command: double
- statement: ['double', '-pipe || is not a pipe']
- args: -pipe || is not a pipe
- command: double
>>> print (c.parser.parseString('command with args, terminator;sufx | piped').dump())
['command', 'with args, terminator', ';', 'sufx', '|', ' piped']
- args: with args, terminator
- command: command
- pipeTo: piped
- statement: ['command', 'with args, terminator', ';']
- args: with args, terminator
- command: command
- terminator: ;
- suffix: sufx
- terminator: ;
>>> print (c.parser.parseString('output into > afile.txt').dump())
['output', 'into', '>', 'afile.txt']
- args: into
- command: output
- output: >
- outputTo: afile.txt
- statement: ['output', 'into']
- args: into
- command: output
>>> print (c.parser.parseString('output into;sufx | pipethrume plz > afile.txt').dump())
['output', 'into', ';', 'sufx', '|', ' pipethrume plz', '>', 'afile.txt']
- args: into
- command: output
- output: >
- outputTo: afile.txt
- pipeTo: pipethrume plz
- statement: ['output', 'into', ';']
- args: into
- command: output
- terminator: ;
- suffix: sufx
- terminator: ;
>>> print (c.parser.parseString('output to paste buffer >> ').dump())
['output', 'to paste buffer', '>>', '']
- args: to paste buffer
- command: output
- output: >>
- statement: ['output', 'to paste buffer']
- args: to paste buffer
- command: output
>>> print (c.parser.parseString('ignore the /* commented | > */ stuff;').dump())
['ignore', 'the /* commented | > */ stuff', ';', '']
- args: the /* commented | > */ stuff
- command: ignore
- statement: ['ignore', 'the /* commented | > */ stuff', ';']
- args: the /* commented | > */ stuff
- command: ignore
- terminator: ;
- terminator: ;
>>> print (c.parser.parseString('has > inside;').dump())
['has', '> inside', ';', '']
- args: > inside
- command: has
- statement: ['has', '> inside', ';']
- args: > inside
- command: has
- terminator: ;
- terminator: ;
>>> print (c.parser.parseString('multiline has > inside an unfinished command').dump())
['multiline', ' has > inside an unfinished command']
- multilineCommand: multiline
>>> print (c.parser.parseString('multiline has > inside;').dump())
['multiline', 'has > inside', ';', '']
- args: has > inside
- multilineCommand: multiline
- statement: ['multiline', 'has > inside', ';']
- args: has > inside
- multilineCommand: multiline
- terminator: ;
- terminator: ;
>>> print (c.parser.parseString('multiline command /* with comment in progress;').dump())
['multiline', ' command /* with comment in progress;']
- multilineCommand: multiline
>>> print (c.parser.parseString('multiline command /* with comment complete */ is done;').dump())
['multiline', 'command /* with comment complete */ is done', ';', '']
- args: command /* with comment complete */ is done
- multilineCommand: multiline
- statement: ['multiline', 'command /* with comment complete */ is done', ';']
- args: command /* with comment complete */ is done
- multilineCommand: multiline
- terminator: ;
- terminator: ;
>>> print (c.parser.parseString('multiline command ends\n\n').dump())
['multiline', 'command ends', '\n', '\n']
- args: command ends
- multilineCommand: multiline
- statement: ['multiline', 'command ends', '\n', '\n']
- args: command ends
- multilineCommand: multiline
- terminator: ['\n', '\n']
- terminator: ['\n', '\n']
>>> print (c.parser.parseString('multiline command "with term; ends" now\n\n').dump())
['multiline', 'command "with term; ends" now', '\n', '\n']
- args: command "with term; ends" now
- multilineCommand: multiline
- statement: ['multiline', 'command "with term; ends" now', '\n', '\n']
- args: command "with term; ends" now
- multilineCommand: multiline
- terminator: ['\n', '\n']
- terminator: ['\n', '\n']
>>> print (c.parser.parseString('what if "quoted strings /* seem to " start comments?').dump())
['what', 'if "quoted strings /* seem to " start comments?']
- args: if "quoted strings /* seem to " start comments?
- command: what
- statement: ['what', 'if "quoted strings /* seem to " start comments?']
- args: if "quoted strings /* seem to " start comments?
- command: what
'''
#outputParser = (pyparsing.Literal('>>') | (pyparsing.WordStart() + '>') | pyparsing.Regex('[^=]>'))('output')
outputParser = (pyparsing.Literal(self.redirector *2) | \
(pyparsing.WordStart() + self.redirector) | \
pyparsing.Regex('[^=]' + self.redirector))('output')
terminatorParser = pyparsing.Or([(hasattr(t, 'parseString') and t) or pyparsing.Literal(t) for t in self.terminators])('terminator')
stringEnd = pyparsing.stringEnd ^ '\nEOF'
self.multilineCommand = pyparsing.Or([pyparsing.Keyword(c, caseless=self.case_insensitive) for c in self.multilineCommands])('multilineCommand')
oneLineCommand = (~self.multilineCommand + pyparsing.Word(self.legalChars))('command')
pipe = pyparsing.Keyword('|', identChars='|')
self.commentGrammars.ignore(pyparsing.quotedString).setParseAction(lambda x: '')
doNotParse = self.commentGrammars | self.commentInProgress | pyparsing.quotedString
afterElements = \
pyparsing.Optional(pipe + pyparsing.SkipTo(outputParser ^ stringEnd, ignore=doNotParse)('pipeTo')) + \
pyparsing.Optional(outputParser + pyparsing.SkipTo(stringEnd, ignore=doNotParse).setParseAction(lambda x: x[0].strip())('outputTo'))
if self.case_insensitive:
self.multilineCommand.setParseAction(lambda x: x[0].lower())
oneLineCommand.setParseAction(lambda x: x[0].lower())
if self.blankLinesAllowed:
self.blankLineTerminationParser = pyparsing.NoMatch
else:
self.blankLineTerminator = (pyparsing.lineEnd + pyparsing.lineEnd)('terminator')
self.blankLineTerminator.setResultsName('terminator')
self.blankLineTerminationParser = ((self.multilineCommand ^ oneLineCommand) + pyparsing.SkipTo(self.blankLineTerminator, ignore=doNotParse).setParseAction(lambda x: x[0].strip())('args') + self.blankLineTerminator)('statement')
self.multilineParser = (((self.multilineCommand ^ oneLineCommand) + pyparsing.SkipTo(terminatorParser, ignore=doNotParse).setParseAction(lambda x: x[0].strip())('args') + terminatorParser)('statement') +
pyparsing.SkipTo(outputParser ^ pipe ^ stringEnd, ignore=doNotParse).setParseAction(lambda x: x[0].strip())('suffix') + afterElements)
self.multilineParser.ignore(self.commentInProgress)
self.singleLineParser = ((oneLineCommand + pyparsing.SkipTo(terminatorParser ^ stringEnd ^ pipe ^ outputParser, ignore=doNotParse).setParseAction(lambda x:x[0].strip())('args'))('statement') +
pyparsing.Optional(terminatorParser) + afterElements)
#self.multilineParser = self.multilineParser.setResultsName('multilineParser')
#self.singleLineParser = self.singleLineParser.setResultsName('singleLineParser')
self.blankLineTerminationParser = self.blankLineTerminationParser.setResultsName('statement')
self.parser = self.prefixParser + (
stringEnd |
self.multilineParser |
self.singleLineParser |
self.blankLineTerminationParser |
self.multilineCommand + pyparsing.SkipTo(stringEnd, ignore=doNotParse)
)
self.parser.ignore(self.commentGrammars)
inputMark = pyparsing.Literal('<')
inputMark.setParseAction(lambda x: '')
fileName = pyparsing.Word(self.legalChars + '/\\')
inputFrom = fileName('inputFrom')
inputFrom.setParseAction(replace_with_file_contents)
# a not-entirely-satisfactory way of distinguishing < as in "import from" from <
# as in "lesser than"
self.inputParser = inputMark + pyparsing.Optional(inputFrom) + pyparsing.Optional('>') + \
pyparsing.Optional(fileName) + (pyparsing.stringEnd | '|')
self.inputParser.ignore(self.commentInProgress)
def preparse(self, raw, **kwargs):
return raw
def postparse(self, parseResult):
return parseResult
def parsed(self, raw, **kwargs):
if isinstance(raw, ParsedString):
p = raw
else:
# preparse is an overridable hook; default makes no changes
s = self.preparse(raw, **kwargs)
s = self.inputParser.transformString(s.lstrip())
s = self.commentGrammars.transformString(s)
for (shortcut, expansion) in self.shortcuts:
if s.lower().startswith(shortcut):
s = s.replace(shortcut, expansion + ' ', 1)
break
result = self.parser.parseString(s)
result['raw'] = raw
result['command'] = result.multilineCommand or result.command
result = self.postparse(result)
p = ParsedString(result.args)
p.parsed = result
p.parser = self.parsed
for (key, val) in kwargs.items():
p.parsed[key] = val
return p
def postparsing_precmd(self, statement):
stop = 0
return stop, statement
def postparsing_postcmd(self, stop):
return stop
def func_named(self, arg):
result = None
target = 'do_' + arg
if target in dir(self):
result = target
else:
if self.abbrev: # accept shortened versions of commands
funcs = [fname for fname in self.keywords if fname.startswith(arg)]
if len(funcs) == 1:
result = 'do_' + funcs[0]
return result
def onecmd_plus_hooks(self, line):
# The outermost level of try/finally nesting can be condensed once
# Python 2.4 support can be dropped.
stop = 0
try:
try:
statement = self.complete_statement(line)
(stop, statement) = self.postparsing_precmd(statement)
if stop:
return self.postparsing_postcmd(stop)
if statement.parsed.command not in self.excludeFromHistory:
self.history.append(statement.parsed.raw)
try:
self.redirect_output(statement)
timestart = datetime.datetime.now()
statement = self.precmd(statement)
stop = self.onecmd(statement)
stop = self.postcmd(stop, statement)
if self.timing:
self.pfeedback('Elapsed: %s' % str(datetime.datetime.now() - timestart))
finally:
self.restore_output(statement)
except EmptyStatement:
return 0
except Exception as e:
self.perror(str(e), statement)
finally:
return self.postparsing_postcmd(stop)
def complete_statement(self, line):
"""Keep accepting lines of input until the command is complete."""
if (not line) or (
not pyparsing.Or(self.commentGrammars).
setParseAction(lambda x: '').transformString(line)):
raise EmptyStatement()
statement = self.parsed(line)
while statement.parsed.multilineCommand and (statement.parsed.terminator == ''):
statement = '%s\n%s' % (statement.parsed.raw,
self.pseudo_raw_input(self.continuation_prompt))
statement = self.parsed(statement)
if not statement.parsed.command:
raise EmptyStatement()
return statement
def redirect_output(self, statement):
if statement.parsed.pipeTo:
self.kept_state = Statekeeper(self, ('stdout',))
self.kept_sys = Statekeeper(sys, ('stdout',))
self.redirect = subprocess.Popen(statement.parsed.pipeTo, shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
sys.stdout = self.stdout = self.redirect.stdin
elif statement.parsed.output:
if (not statement.parsed.outputTo) and (not can_clip):
raise EnvironmentError('Cannot redirect to paste buffer; install ``xclip`` and re-run to enable')
self.kept_state = Statekeeper(self, ('stdout',))
self.kept_sys = Statekeeper(sys, ('stdout',))
if statement.parsed.outputTo:
mode = 'w'
if statement.parsed.output == 2 * self.redirector:
mode = 'a'
sys.stdout = self.stdout = open(os.path.expanduser(statement.parsed.outputTo), mode)
else:
sys.stdout = self.stdout = tempfile.TemporaryFile(mode="w+")
if statement.parsed.output == '>>':
self.stdout.write(get_paste_buffer())
def restore_output(self, statement):
if self.kept_state:
if statement.parsed.output:
if not statement.parsed.outputTo:
self.stdout.seek(0)
write_to_paste_buffer(self.stdout.read())
elif statement.parsed.pipeTo:
for result in self.redirect.communicate():
self.kept_state.stdout.write(result or '')
self.stdout.close()
self.kept_state.restore()
self.kept_sys.restore()
self.kept_state = None
def onecmd(self, line):
"""Interpret the argument as though it had been typed in response
to the prompt.
This may be overridden, but should not normally need to be;
see the precmd() and postcmd() methods for useful execution hooks.
The return value is a flag indicating whether interpretation of
commands by the interpreter should stop.
This (`cmd2`) version of `onecmd` already overrides `cmd`'s `onecmd`.
"""
statement = self.parsed(line)
self.lastcmd = statement.parsed.raw
funcname = self.func_named(statement.parsed.command)
if not funcname:
return self._default(statement)
try:
func = getattr(self, funcname)
except AttributeError:
return self._default(statement)
stop = func(statement)
return stop
def _default(self, statement):
arg = statement.full_parsed_statement()
if self.default_to_shell:
result = os.system(arg)
if not result:
return self.postparsing_postcmd(None)
return self.postparsing_postcmd(self.default(arg))
def pseudo_raw_input(self, prompt):
"""copied from cmd's cmdloop; like raw_input, but accounts for changed stdin, stdout"""
if self.use_rawinput:
try:
line = raw_input(prompt)
except EOFError:
line = 'EOF'
else:
self.stdout.write(prompt)
self.stdout.flush()
line = self.stdin.readline()
if not len(line):
line = 'EOF'
else:
if line[-1] == '\n': # this was always true in Cmd
line = line[:-1]
return line
def _cmdloop(self, intro=None):
"""Repeatedly issue a prompt, accept input, parse an initial prefix
off the received input, and dispatch to action methods, passing them
the remainder of the line as argument.
"""
# An almost perfect copy from Cmd; however, the pseudo_raw_input portion
# has been split out so that it can be called separately
self.preloop()
if self.use_rawinput and self.completekey:
try:
import readline
self.old_completer = readline.get_completer()
readline.set_completer(self.complete)
readline.parse_and_bind(self.completekey+": complete")
except ImportError:
pass
try:
if intro is not None:
self.intro = intro
if self.intro:
self.stdout.write(str(self.intro)+"\n")
stop = None
while not stop:
if self.cmdqueue:
line = self.cmdqueue.pop(0)
else:
line = self.pseudo_raw_input(self.prompt)
if (self.echo) and (isinstance(self.stdin, file)):
self.stdout.write(line + '\n')
stop = self.onecmd_plus_hooks(line)
self.postloop()
finally:
if self.use_rawinput and self.completekey:
try:
import readline
readline.set_completer(self.old_completer)
except ImportError:
pass
return stop
def do_EOF(self, arg):
return self._STOP_SCRIPT_NO_EXIT # End of script; should not exit app
do_eof = do_EOF
def do_quit(self, arg):
return self._STOP_AND_EXIT
do_exit = do_quit
do_q = do_quit
def select(self, options, prompt='Your choice? '):
'''Presents a numbered menu to the user. Modelled after
the bash shell's SELECT. Returns the item chosen.
Argument ``options`` can be:
| a single string -> will be split into one-word options
| a list of strings -> will be offered as options
| a list of tuples -> interpreted as (value, text), so
that the return value can differ from
the text advertised to the user '''
if isinstance(options, basestring):
options = zip(options.split(), options.split())
fulloptions = []
for opt in options:
if isinstance(opt, basestring):
fulloptions.append((opt, opt))
else:
try:
fulloptions.append((opt[0], opt[1]))
except IndexError:
fulloptions.append((opt[0], opt[0]))
for (idx, (value, text)) in enumerate(fulloptions):
self.poutput(' %2d. %s\n' % (idx+1, text))
while True:
response = raw_input(prompt)
try:
response = int(response)
result = fulloptions[response - 1][0]
break
except ValueError:
pass # loop and ask again
return result
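    # Illustrative sketch (hypothetical command, not from the original source):
    # inside a do_* method one might write
    #     choice = self.select([('ham', 'Ham sandwich'),
    #                           ('cheese', 'Cheese sandwich')],
    #                          'Which sandwich? ')
    # The user is shown a numbered menu of the two texts and ``choice``
    # receives the matching value, e.g. 'ham'.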
@options([make_option('-l', '--long', action="store_true",
help="describe function of parameter")])
def do_show(self, arg, opts):
'''Shows value of a parameter.'''
param = arg.strip().lower()
result = {}
maxlen = 0
for p in self.settable:
if (not param) or p.startswith(param):
result[p] = '%s: %s' % (p, str(getattr(self, p)))
maxlen = max(maxlen, len(result[p]))
if result:
for p in sorted(result):
if opts.long:
self.poutput('%s # %s' % (result[p].ljust(maxlen), self.settable[p]))
else:
self.poutput(result[p])
else:
raise NotImplementedError("Parameter '%s' not supported (type 'show' for list of parameters)." % param)
def do_set(self, arg):
'''
Sets a cmd2 parameter. Accepts abbreviated parameter names so long
as there is no ambiguity. Call without arguments for a list of
settable parameters with their values.'''
try:
statement, paramName, val = arg.parsed.raw.split(None, 2)
val = val.strip()
paramName = paramName.strip().lower()
if paramName not in self.settable:
hits = [p for p in self.settable if p.startswith(paramName)]
if len(hits) == 1:
paramName = hits[0]
else:
return self.do_show(paramName)
currentVal = getattr(self, paramName)
if (val[0] == val[-1]) and val[0] in ("'", '"'):
val = val[1:-1]
else:
val = cast(currentVal, val)
setattr(self, paramName, val)
self.stdout.write('%s - was: %s\nnow: %s\n' % (paramName, currentVal, val))
if currentVal != val:
try:
onchange_hook = getattr(self, '_onchange_%s' % paramName)
onchange_hook(old=currentVal, new=val)
except AttributeError:
pass
except (ValueError, AttributeError, NotSettableError) as e:
self.do_show(arg)
def do_pause(self, arg):
'Displays the specified text then waits for the user to press RETURN.'
raw_input(arg + '\n')
def do_shell(self, arg):
'execute a command as if at the OS prompt.'
os.system(arg)
def do_py(self, arg):
'''
py <command>: Executes a Python command.
py: Enters interactive Python mode.
        End with ``Ctrl-D`` (Unix) / ``Ctrl-Z`` (Windows), ``quit()``, ``exit()``.
Non-python commands can be issued with ``cmd("your command")``.
Run python code from external files with ``run("filename.py")``
'''
self.pystate['self'] = self
arg = arg.parsed.raw[2:].strip()
localvars = (self.locals_in_py and self.pystate) or {}
interp = InteractiveConsole(locals=localvars)
interp.runcode('import sys, os;sys.path.insert(0, os.getcwd())')
if arg.strip():
interp.runcode(arg)
else:
def quit():
raise EmbeddedConsoleExit
def onecmd_plus_hooks(arg):
return self.onecmd_plus_hooks(arg + '\n')
def run(arg):
try:
file = open(arg)
interp.runcode(file.read())
file.close()
except IOError as e:
self.perror(e)
self.pystate['quit'] = quit
self.pystate['exit'] = quit
self.pystate['cmd'] = onecmd_plus_hooks
self.pystate['run'] = run
try:
cprt = 'Type "help", "copyright", "credits" or "license" for more information.'
keepstate = Statekeeper(sys, ('stdin','stdout'))
sys.stdout = self.stdout
sys.stdin = self.stdin
interp.interact(banner= "Python %s on %s\n%s\n(%s)\n%s" %
(sys.version, sys.platform, cprt, self.__class__.__name__, self.do_py.__doc__))
except EmbeddedConsoleExit:
pass
keepstate.restore()
@options([make_option('-s', '--script', action="store_true", help="Script format; no separation lines"),
], arg_desc = '(limit on which commands to include)')
def do_history(self, arg, opts):
"""history [arg]: lists past commands issued
| no arg: list all
| arg is integer: list one history item, by index
| arg is string: string search
| arg is /enclosed in forward-slashes/: regular expression search
"""
if arg:
history = self.history.get(arg)
else:
history = self.history
for hi in history:
if opts.script:
self.poutput(hi)
else:
self.stdout.write(hi.pr())
def last_matching(self, arg):
try:
if arg:
return self.history.get(arg)[-1]
else:
return self.history[-1]
except IndexError:
return None
def do_list(self, arg):
"""list [arg]: lists last command issued
no arg -> list most recent command
arg is integer -> list one history item, by index
a..b, a:b, a:, ..b -> list spans from a (or start) to b (or end)
arg is string -> list all commands matching string search
arg is /enclosed in forward-slashes/ -> regular expression search
"""
try:
history = self.history.span(arg or '-1')
except IndexError:
history = self.history.search(arg)
for hi in history:
self.poutput(hi.pr())
do_hi = do_history
do_l = do_list
do_li = do_list
def do_ed(self, arg):
"""ed: edit most recent command in text editor
ed [N]: edit numbered command from history
ed [filename]: edit specified file name
        Commands are run after the editor is closed.
"set edit (program-name)" or set EDITOR environment variable
to control which editing program is used."""
if not self.editor:
raise EnvironmentError("Please use 'set editor' to specify your text editing program of choice.")
filename = self.default_file_name
if arg:
try:
buffer = self.last_matching(int(arg))
except ValueError:
filename = arg
buffer = ''
else:
buffer = self.history[-1]
if buffer:
f = open(os.path.expanduser(filename), 'w')
f.write(buffer or '')
f.close()
os.system('%s %s' % (self.editor, filename))
self.do__load(filename)
do_edit = do_ed
saveparser = (pyparsing.Optional(pyparsing.Word(pyparsing.nums)^'*')("idx") +
pyparsing.Optional(pyparsing.Word(legalChars + '/\\'))("fname") +
pyparsing.stringEnd)
def do_save(self, arg):
"""`save [N] [filename.ext]`
Saves command from history to file.
| N => Number of command (from history), or `*`;
| most recent command if omitted"""
try:
args = self.saveparser.parseString(arg)
except pyparsing.ParseException:
self.perror('Could not understand save target %s' % arg)
raise SyntaxError(self.do_save.__doc__)
fname = args.fname or self.default_file_name
if args.idx == '*':
saveme = '\n\n'.join(self.history[:])
elif args.idx:
saveme = self.history[int(args.idx)-1]
else:
saveme = self.history[-1]
try:
f = open(os.path.expanduser(fname), 'w')
f.write(saveme)
f.close()
self.pfeedback('Saved to %s' % (fname))
except Exception as e:
self.perror('Error saving %s' % (fname))
raise
def read_file_or_url(self, fname):
# TODO: not working on localhost
if isinstance(fname, file):
result = open(fname, 'r')
else:
match = self.urlre.match(fname)
if match:
result = urllib.urlopen(match.group(1))
else:
fname = os.path.expanduser(fname)
try:
result = open(os.path.expanduser(fname), 'r')
except IOError:
result = open('%s.%s' % (os.path.expanduser(fname),
self.defaultExtension), 'r')
return result
def do__relative_load(self, arg=None):
'''
Runs commands in script at file or URL; if this is called from within an
already-running script, the filename will be interpreted relative to the
already-running script's directory.'''
if arg:
arg = arg.split(None, 1)
targetname, args = arg[0], (arg[1:] or [''])[0]
targetname = os.path.join(self.current_script_dir or '', targetname)
self.do__load('%s %s' % (targetname, args))
urlre = re.compile('(https?://[-\\w\\./]+)')
def do_load(self, arg=None):
"""Runs script of command(s) from a file or URL."""
if arg is None:
targetname = self.default_file_name
else:
arg = arg.split(None, 1)
targetname, args = arg[0], (arg[1:] or [''])[0].strip()
try:
target = self.read_file_or_url(targetname)
except IOError as e:
self.perror('Problem accessing script from %s: \n%s' % (targetname, e))
return
keepstate = Statekeeper(self, ('stdin','use_rawinput','prompt',
'continuation_prompt','current_script_dir'))
self.stdin = target
self.use_rawinput = False
self.prompt = self.continuation_prompt = ''
self.current_script_dir = os.path.split(targetname)[0]
stop = self._cmdloop()
self.stdin.close()
keepstate.restore()
self.lastcmd = ''
return stop and (stop != self._STOP_SCRIPT_NO_EXIT)
do__load = do_load # avoid an unfortunate legacy use of do_load from sqlpython
def do_run(self, arg):
"""run [arg]: re-runs an earlier command
no arg -> run most recent command
arg is integer -> run one history item, by index
arg is string -> run most recent command by string search
arg is /enclosed in forward-slashes/ -> run most recent by regex
"""
'run [N]: runs the SQL that was run N commands ago'
runme = self.last_matching(arg)
self.pfeedback(runme)
if runme:
stop = self.onecmd_plus_hooks(runme)
do_r = do_run
def fileimport(self, statement, source):
try:
f = open(os.path.expanduser(source))
except IOError:
self.stdout.write("Couldn't read from file %s\n" % source)
return ''
data = f.read()
f.close()
return data
def runTranscriptTests(self, callargs):
class TestMyAppCase(Cmd2TestCase):
CmdApp = self.__class__
self.__class__.testfiles = callargs
sys.argv = [sys.argv[0]] # the --test argument upsets unittest.main()
testcase = TestMyAppCase()
runner = unittest.TextTestRunner()
result = runner.run(testcase)
result.printErrors()
def run_commands_at_invocation(self, callargs):
for initial_command in callargs:
if self.onecmd_plus_hooks(initial_command + '\n'):
return self._STOP_AND_EXIT
def cmdloop(self):
parser = optparse.OptionParser()
parser.add_option('-t', '--test', dest='test',
action="store_true",
help='Test against transcript(s) in FILE (wildcards OK)')
(callopts, callargs) = parser.parse_args()
if callopts.test:
self.runTranscriptTests(callargs)
else:
if not self.run_commands_at_invocation(callargs):
self._cmdloop()
class HistoryItem(str):
listformat = '-------------------------[%d]\n%s\n'
def __init__(self, instr):
str.__init__(self)
self.lowercase = self.lower()
self.idx = None
def pr(self):
return self.listformat % (self.idx, str(self))
class History(list):
'''A list of HistoryItems that knows how to respond to user requests.
>>> h = History([HistoryItem('first'), HistoryItem('second'), HistoryItem('third'), HistoryItem('fourth')])
>>> h.span('-2..')
['third', 'fourth']
>>> h.span('2..3')
['second', 'third']
>>> h.span('3')
['third']
>>> h.span(':')
['first', 'second', 'third', 'fourth']
>>> h.span('2..')
['second', 'third', 'fourth']
>>> h.span('-1')
['fourth']
>>> h.span('-2..-3')
['third', 'second']
>>> h.search('o')
['second', 'fourth']
>>> h.search('/IR/')
['first', 'third']
'''
def zero_based_index(self, onebased):
result = onebased
if result > 0:
result -= 1
return result
def to_index(self, raw):
if raw:
result = self.zero_based_index(int(raw))
else:
result = None
return result
def search(self, target):
target = target.strip()
if target[0] == target[-1] == '/' and len(target) > 1:
target = target[1:-1]
else:
target = re.escape(target)
pattern = re.compile(target, re.IGNORECASE)
return [s for s in self if pattern.search(s)]
spanpattern = re.compile(r'^\s*(?P<start>\-?\d+)?\s*(?P<separator>:|(\.{2,}))?\s*(?P<end>\-?\d+)?\s*$')
def span(self, raw):
if raw.lower() in ('*', '-', 'all'):
raw = ':'
results = self.spanpattern.search(raw)
if not results:
raise IndexError
if not results.group('separator'):
return [self[self.to_index(results.group('start'))]]
start = self.to_index(results.group('start'))
end = self.to_index(results.group('end'))
reverse = False
if end is not None:
if end < start:
(start, end) = (end, start)
reverse = True
end += 1
result = self[start:end]
if reverse:
result.reverse()
return result
rangePattern = re.compile(r'^\s*(?P<start>[\d]+)?\s*\-\s*(?P<end>[\d]+)?\s*$')
def append(self, new):
new = HistoryItem(new)
list.append(self, new)
new.idx = len(self)
def extend(self, new):
for n in new:
self.append(n)
def get(self, getme=None, fromEnd=False):
if not getme:
return self
try:
getme = int(getme)
if getme < 0:
return self[:(-1 * getme)]
else:
return [self[getme-1]]
except IndexError:
return []
except ValueError:
rangeResult = self.rangePattern.search(getme)
if rangeResult:
start = rangeResult.group('start') or None
                end = rangeResult.group('end') or None
if start:
start = int(start) - 1
if end:
end = int(end)
return self[start:end]
getme = getme.strip()
if getme.startswith(r'/') and getme.endswith(r'/'):
finder = re.compile(getme[1:-1], re.DOTALL | re.MULTILINE | re.IGNORECASE)
def isin(hi):
return finder.search(hi)
else:
def isin(hi):
return (getme.lower() in hi.lowercase)
return [itm for itm in self if isin(itm)]
class NotSettableError(Exception):
pass
def cast(current, new):
"""Tries to force a new value into the same type as the current."""
typ = type(current)
if typ == bool:
try:
return bool(int(new))
except (ValueError, TypeError):
pass
try:
new = new.lower()
except:
pass
if (new=='on') or (new[0] in ('y','t')):
return True
if (new=='off') or (new[0] in ('n','f')):
return False
else:
try:
return typ(new)
except:
pass
print ("Problem setting parameter (now %s) to %s; incorrect type?" % (current, new))
return current
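# Illustrative sketch (hypothetical values, not part of the original source)
# of how ``cast`` coerces strings to the type of the current setting value.
def _example_cast():  # pragma: no cover
    return (cast(True, 'off'),  # -> False; boolean keywords are recognised
            cast(5, '12'),      # -> 12; converted to the current type
            cast(5, 'twelve'))  # -> 5; conversion fails, current value kept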
class Statekeeper(object):
def __init__(self, obj, attribs):
self.obj = obj
self.attribs = attribs
if self.obj:
self.save()
def save(self):
for attrib in self.attribs:
setattr(self, attrib, getattr(self.obj, attrib))
def restore(self):
if self.obj:
for attrib in self.attribs:
setattr(self.obj, attrib, getattr(self, attrib))
class Borg(object):
'''All instances of any Borg subclass will share state.
from Python Cookbook, 2nd Ed., recipe 6.16'''
_shared_state = {}
def __new__(cls, *a, **k):
obj = object.__new__(cls, *a, **k)
obj.__dict__ = cls._shared_state
return obj
class OutputTrap(Borg):
'''Instantiate an OutputTrap to divert/capture ALL stdout output. For use in unit testing.
Call `tearDown()` to return to normal output.'''
def __init__(self):
self.contents = ''
self.old_stdout = sys.stdout
sys.stdout = self
def write(self, txt):
self.contents += txt
def read(self):
result = self.contents
self.contents = ''
return result
def tearDown(self):
sys.stdout = self.old_stdout
self.contents = ''
class Cmd2TestCase(unittest.TestCase):
'''Subclass this, setting CmdApp, to make a unittest.TestCase class
that will execute the commands in a transcript file and expect the results shown.
See example.py'''
CmdApp = None
def fetchTranscripts(self):
self.transcripts = {}
for fileset in self.CmdApp.testfiles:
for fname in glob.glob(fileset):
tfile = open(fname)
self.transcripts[fname] = iter(tfile.readlines())
tfile.close()
if not len(self.transcripts):
raise StandardError("No test files found - nothing to test.")
def setUp(self):
if self.CmdApp:
self.outputTrap = OutputTrap()
self.cmdapp = self.CmdApp()
self.fetchTranscripts()
def runTest(self): # was testall
if self.CmdApp:
its = sorted(self.transcripts.items())
for (fname, transcript) in its:
self._test_transcript(fname, transcript)
regexPattern = pyparsing.QuotedString(quoteChar=r'/', escChar='\\', multiline=True, unquoteResults=True)
regexPattern.ignore(pyparsing.cStyleComment)
notRegexPattern = pyparsing.Word(pyparsing.printables)
notRegexPattern.setParseAction(lambda t: re.escape(t[0]))
expectationParser = regexPattern | notRegexPattern
anyWhitespace = re.compile(r'\s', re.DOTALL | re.MULTILINE)
def _test_transcript(self, fname, transcript):
lineNum = 0
finished = False
line = transcript.next()
lineNum += 1
tests_run = 0
while not finished:
# Scroll forward to where actual commands begin
while not line.startswith(self.cmdapp.prompt):
try:
line = transcript.next()
except StopIteration:
finished = True
break
lineNum += 1
command = [line[len(self.cmdapp.prompt):]]
line = transcript.next()
# Read the entirety of a multi-line command
while line.startswith(self.cmdapp.continuation_prompt):
command.append(line[len(self.cmdapp.continuation_prompt):])
try:
line = transcript.next()
except StopIteration:
                    raise StopIteration(
                        'Transcript broke off while reading command beginning at line %d with\n%s'
                        % (lineNum, command[0]))
lineNum += 1
command = ''.join(command)
# Send the command into the application and capture the resulting output
stop = self.cmdapp.onecmd_plus_hooks(command)
#TODO: should act on ``stop``
result = self.outputTrap.read()
# Read the expected result from transcript
if line.startswith(self.cmdapp.prompt):
message = '\nFile %s, line %d\nCommand was:\n%r\nExpected: (nothing)\nGot:\n%r\n'%\
(fname, lineNum, command, result)
self.assert_(not(result.strip()), message)
continue
expected = []
while not line.startswith(self.cmdapp.prompt):
expected.append(line)
try:
line = transcript.next()
except StopIteration:
finished = True
break
lineNum += 1
expected = ''.join(expected)
# Compare actual result to expected
message = '\nFile %s, line %d\nCommand was:\n%s\nExpected:\n%s\nGot:\n%s\n'%\
(fname, lineNum, command, expected, result)
expected = self.expectationParser.transformString(expected)
# checking whitespace is a pain - let's skip it
expected = self.anyWhitespace.sub('', expected)
result = self.anyWhitespace.sub('', result)
self.assert_(re.match(expected, result, re.MULTILINE | re.DOTALL), message)
def tearDown(self):
if self.CmdApp:
self.outputTrap.tearDown()
if __name__ == '__main__':
doctest.testmod(optionflags = doctest.NORMALIZE_WHITESPACE)
'''
To make your application transcript-testable, replace
::
app = MyApp()
app.cmdloop()
with
::
app = MyApp()
cmd2.run(app)
Then run a session of your application and paste the entire screen contents
into a file, ``transcript.test``, and invoke the test like::
python myapp.py --test transcript.test
Wildcards can be used to test against multiple transcript files.
'''
| apache-2.0 | 4,614,588,329,020,379,000 | 39.785311 | 239 | 0.552293 | false |
CargoSpace/CargoSpaceChallenge | contest/forms.py | 1 | 1081 | from django import forms
from contest.models import ContestSubmission, ProblemSet, ProblemInput
class ContestSubmissionForm(forms.ModelForm):
class Meta:
model = ContestSubmission
exclude = ["submitted_by","accepted"]
fields = [
"contest",
"problem",
"submission",#forms.FileField()
]
class ProblemSetForm(forms.ModelForm):
class Meta:
model = ProblemSet
exclude = ["created_by","problem_color", "language",
"problem_stdin", "problem_stdin"]
fields = [
"problem_type",
"title",
"source_credit",
"description",
"example_input",#forms.FileField()
"example_output",#forms.FileField()
"problem_type",
"image",
"input_description",
"output_description",
"constraints",
]
class ProblemInputForm(forms.ModelForm):
class Meta:
model = ProblemInput
exclude = ["problem","label","language",
"problem_type",
"title",
"source_credit",
"description",
"example_input",
"example_output",
"problem_type",]
fields = [
"problem_stdin",#forms.FileField()
"problem_stdout",#forms.FileField()
] | apache-2.0 | -6,713,205,025,555,224,000 | 21.081633 | 70 | 0.666975 | false |
dropbox/emmer | tests/test_conversation_manager.py | 1 | 2183 | import os
import sys
import unittest
sys.path.append(os.path.join(os.path.dirname(__file__), "../emmer"))
from conversation_table import ConversationTable
class StubConversation(object):
pass
class TestConversationTable(unittest.TestCase):
def test_add_get(self):
table = ConversationTable()
conversation = StubConversation()
table.add_conversation("127.0.0.1", "3942", conversation)
self.assertEqual(table.get_conversation("127.0.0.1", "3942"),
conversation)
self.assertTrue(table.lock._RLock__count == 0)
def test_get_without_add(self):
table = ConversationTable()
self.assertIsNone(table.get_conversation("127.0.0.1", "3942"))
self.assertTrue(table.lock._RLock__count == 0)
def test_add_delete(self):
table = ConversationTable()
conversation = StubConversation()
table.add_conversation("127.0.0.1", "3942", conversation)
self.assertTrue(table.delete_conversation("127.0.0.1", "3942"))
self.assertIsNone(table.get_conversation("127.0.0.1", "3942"))
self.assertTrue(table.lock._RLock__count == 0)
def test_delete_without_add(self):
# Seems uninteresting, but this test is useful to defend against
# exceptions
table = ConversationTable()
self.assertEqual(table.delete_conversation("127.0.0.1", "3942"),
False)
self.assertIsNone(table.get_conversation("127.0.0.1", "3942"))
self.assertTrue(table.lock._RLock__count == 0)
def test_conversations(self):
table = ConversationTable()
conversation_one = StubConversation()
table.add_conversation("10.0.0.1", "3942", conversation_one)
conversation_two = StubConversation()
table.add_conversation("10.0.0.2", "3942", conversation_two)
# Either order of returned results is fine
self.assertTrue(
table.conversations == [conversation_one, conversation_two]
or table.conversations == [conversation_two, conversation_one],
"conversations retrieved don't match")
if __name__ == "__main__":
unittest.main()
| mit | -5,287,262,184,213,640,000 | 37.298246 | 75 | 0.639945 | false |
michath/MetaMonkey | js/src/config/expandlibs_gen.py | 7 | 1930 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
'''Given a list of object files and library names, prints a library
descriptor to standard output'''
from __future__ import with_statement
import sys
import os
import expandlibs_config as conf
from expandlibs import LibDescriptor, isObject, ensureParentDir, ExpandLibsDeps
from optparse import OptionParser
def generate(args):
desc = LibDescriptor()
for arg in args:
if isObject(arg):
if os.path.exists(arg):
desc['OBJS'].append(os.path.abspath(arg))
else:
raise Exception("File not found: %s" % arg)
elif os.path.splitext(arg)[1] == conf.LIB_SUFFIX:
if os.path.exists(arg) or os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
desc['LIBS'].append(os.path.abspath(arg))
else:
raise Exception("File not found: %s" % arg)
return desc
if __name__ == '__main__':
parser = OptionParser()
parser.add_option("--depend", dest="depend", metavar="FILE",
help="generate dependencies for the given execution and store it in the given file")
parser.add_option("-o", dest="output", metavar="FILE",
help="send output to the given file")
(options, args) = parser.parse_args()
if not options.output:
raise Exception("Missing option: -o")
ensureParentDir(options.output)
with open(options.output, 'w') as outfile:
print >>outfile, generate(args)
if options.depend:
ensureParentDir(options.depend)
with open(options.depend, 'w') as depfile:
deps = ExpandLibsDeps(args)
depfile.write("%s : %s\n" % (options.output, ' '.join(deps)))
for dep in deps:
depfile.write("%s :\n" % dep)
| mpl-2.0 | -1,199,564,518,024,695,300 | 37.6 | 92 | 0.629534 | false |
ohsu-computational-biology/server | tests/unit/test_response_generators.py | 1 | 7668 | """
Tests the backend response generators
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
import ga4gh.backend as backend
import ga4gh.datamodel.reads as reads
import ga4gh.datamodel.variants as variants
import ga4gh.exceptions as exceptions
import ga4gh.protocol as protocol
import ga4gh.datarepo as datarepo
def generateVariant():
variant = protocol.Variant()
return variant
class MockVariantSet(variants.AbstractVariantSet):
def __init__(self, parentContainer, localId, numVariants):
super(MockVariantSet, self).__init__(parentContainer, localId)
self.numVariants = numVariants
def getVariants(self, referenceName, startPosition, endPosition,
variantName=None, callSetIds=None):
for i in range(self.numVariants):
yield generateVariant()
class TestVariantsGenerator(unittest.TestCase):
"""
Tests the logic of variantsGenerator
"""
def setUp(self):
self.request = protocol.SearchVariantsRequest()
self.backend = backend.Backend(datarepo.SimulatedDataRepository())
self.dataset = self.backend.getDataRepository().getDatasets()[0]
def testNonexistentVariantSet(self):
# a request for a variant set that doesn't exist should throw an error
variantSet = variants.AbstractVariantSet(
self.dataset, 'notFound')
self.request.variant_set_id = variantSet.getId()
with self.assertRaises(exceptions.VariantSetNotFoundException):
self.backend.variantsGenerator(self.request)
def testVariantSetEmpty(self):
# a variant set with no variants should return none
self._initVariantSet(0)
iterator = self.backend.variantsGenerator(self.request)
self.assertIsNone(next(iterator, None))
def testVariantSetOneVariant(self):
# a variant set with one variant should return it and a null pageToken
self._initVariantSet(1)
iterator = self.backend.variantsGenerator(self.request)
variant, nextPageToken = next(iterator)
self.assertIsNotNone(variant)
self.assertIsNone(nextPageToken)
self.assertIsNone(next(iterator, None))
def testVariantSetTwoVariants(self):
# a variant set with two variants should return the first with
# a non-null pageToken and the second with a null pageToken
self._initVariantSet(2)
iterator = self.backend.variantsGenerator(self.request)
variant, nextPageToken = next(iterator)
self.assertIsNotNone(variant)
self.assertIsNotNone(nextPageToken)
variant, nextPageToken = next(iterator)
self.assertIsNotNone(variant)
self.assertIsNone(nextPageToken)
self.assertIsNone(next(iterator, None))
def _initVariantSet(self, numVariants):
variantSet = MockVariantSet(
self.dataset, "mockvs", numVariants)
self.dataset.addVariantSet(variantSet)
self.request.variant_set_id = variantSet.getId()
def generateReadAlignment(position=0, sequence='abc'):
alignment = protocol.ReadAlignment()
alignment.alignment.position.position = position
alignment.aligned_sequence = sequence
return alignment
class MockReadGroup(reads.AbstractReadGroup):
def __init__(self, parentContainer, localId, numAlignments):
super(MockReadGroup, self).__init__(parentContainer, localId)
self.numAlignments = numAlignments
def getReadAlignments(self, referenceName=None, referenceId=None,
start=None, end=None):
for i in range(self.numAlignments):
yield generateReadAlignment(i)
class TestReadsGenerator(unittest.TestCase):
"""
Tests the logic of readsGenerator
"""
def setUp(self):
self.request = protocol.SearchReadsRequest()
self.backend = backend.Backend(
datarepo.SimulatedDataRepository(numAlignments=0))
dataRepo = self.backend.getDataRepository()
referenceSet = dataRepo.getReferenceSetByIndex(0)
reference = referenceSet.getReferenceByIndex(0)
self.request.reference_id = reference.getId()
self.dataset = dataRepo.getDatasets()[0]
self.readGroupSet = self.dataset.getReadGroupSets()[0]
def testNoReadGroupsNotSupported(self):
# a request for no read groups should throw an exception
with self.assertRaises(exceptions.BadRequestException):
self.backend.readsGenerator(self.request)
def testNonexistentReadGroup(self):
# a request for a readGroup that doesn't exist should throw an error
readGroup = reads.AbstractReadGroup(self.readGroupSet, 'notFound')
self.request.read_group_ids.extend([readGroup.getId()])
with self.assertRaises(exceptions.ReadGroupNotFoundException):
self.backend.readsGenerator(self.request)
def testReadGroupEmpty(self):
# a readGroup with no reads should return none
self._initReadGroup(0)
iterator = self.backend.readsGenerator(self.request)
self.assertIsNone(next(iterator, None))
def testReadGroupOneRead(self):
# a readGroup with one read should return it and a null nextPageToken
self._initReadGroup(1)
iterator = self.backend.readsGenerator(self.request)
alignment, nextPageToken = next(iterator)
self.assertIsNotNone(alignment)
self.assertIsNone(nextPageToken)
self.assertIsNone(next(iterator, None))
def testReadGroupTwoReads(self):
# a readGroup with two reads should return the first with
# a non-null pageToken and the second with a null pageToken
self._initReadGroup(2)
iterator = self.backend.readsGenerator(self.request)
alignment, nextPageToken = next(iterator)
self.assertIsNotNone(alignment)
self.assertIsNotNone(nextPageToken)
alignment, nextPageToken = next(iterator)
self.assertIsNotNone(alignment)
self.assertIsNone(nextPageToken)
self.assertIsNone(next(iterator, None))
def _initReadGroup(self, numAlignments):
readGroup = MockReadGroup(
self.readGroupSet, "mockrg", numAlignments)
self.readGroupSet.addReadGroup(readGroup)
self.request.read_group_ids.extend([readGroup.getId()])
class TestVariantsIntervalIteratorClassMethods(unittest.TestCase):
"""
Test the variants interval iterator class methods
"""
def setUp(self):
self.variant = protocol.Variant()
self.variant.start = 4
self.variant.end = 6
self.intervalIterator = backend.VariantsIntervalIterator
def testGetVariantStart(self):
result = self.intervalIterator._getStart(self.variant)
self.assertEqual(self.variant.start, result)
def testGetVariantEnd(self):
result = self.intervalIterator._getEnd(self.variant)
self.assertEqual(self.variant.end, result)
class TestReadsIntervalIteratorClassMethods(unittest.TestCase):
"""
    Test the reads interval iterator class methods
"""
def setUp(self):
self.read = generateReadAlignment(5)
self.intervalIterator = backend.ReadsIntervalIterator
def testGetReadStart(self):
result = self.intervalIterator._getStart(self.read)
self.assertEqual(self.read.alignment.position.position, result)
def testGetReadEnd(self):
result = self.intervalIterator._getEnd(self.read)
self.assertEqual(
self.intervalIterator._getStart(self.read) +
len(self.read.aligned_sequence), result)
| apache-2.0 | 299,321,730,219,977,660 | 36.773399 | 78 | 0.699922 | false |
HomeRad/TorCleaner | tests/proxy/rfc2616/test_dateformat.py | 1 | 4662 | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2005-2010 Bastian Kleineidam
"""
"""
import time
from .. import ProxyTest
from wc.http.date import get_date_rfc850, get_date_rfc1123, get_date_asctime
class test_dateformat_warn_rfc1123_rfc850(ProxyTest):
"""
Test deletion of Warning: header with rfc850 date and
Date: header with rfc1123 date.
"""
def test_dateformat_warn_rfc1123_rfc850(self):
self.start_test()
def get_response_headers(self, content):
now = time.time()
warndate = get_date_rfc850(now - 5)
warning = '119 smee "hulla" "%s"' % warndate
date = get_date_rfc1123(now)
return [
"Content-Type: text/plain",
"Content-Length: %d" % len(content),
"Warning: %s" % warning,
"Date: %s" % date,
]
def check_response_headers(self, response):
self.assertTrue(not response.has_header("Warning"))
class test_dateformat_warn_rfc1123_asctime(ProxyTest):
"""
Test deletion of Warning: header with asctime date and
Date: header with rfc1123 date.
"""
def test_dateformat_warn_rfc1123_asctime(self):
self.start_test()
def get_response_headers(self, content):
now = time.time()
warndate = get_date_asctime(now - 5)
warning = '119 smee "hulla" "%s"' % warndate
date = get_date_rfc1123(now)
return [
"Content-Type: text/plain",
"Content-Length: %d" % len(content),
"Warning: %s" % warning,
"Date: %s" % date,
]
def check_response_headers(self, response):
self.assertTrue(not response.has_header("Warning"))
class test_dateformat_warn_rfc850_rfc1123(ProxyTest):
"""
Test deletion of Warning: header with rfc1123 date and
Date: header with rfc850 date.
"""
def test_dateformat_warn_rfc850_rfc1123(self):
self.start_test()
def get_response_headers(self, content):
now = time.time()
warndate = get_date_rfc1123(now - 5)
warning = '119 smee "hulla" "%s"' % warndate
date = get_date_rfc850(now)
return [
"Content-Type: text/plain",
"Content-Length: %d" % len(content),
"Warning: %s" % warning,
"Date: %s" % date,
]
def check_response_headers(self, response):
self.assertTrue(not response.has_header("Warning"))
class test_dateformat_warn_rfc850_asctime(ProxyTest):
"""
Test deletion of Warning: header with asctime date and
Date: header with rfc850 date.
"""
def test_dateformat_warn_rfc850_asctime(self):
self.start_test()
def get_response_headers(self, content):
now = time.time()
warndate = get_date_asctime(now - 5)
warning = '119 smee "hulla" "%s"' % warndate
date = get_date_rfc850(now)
return [
"Content-Type: text/plain",
"Content-Length: %d" % len(content),
"Warning: %s" % warning,
"Date: %s" % date,
]
def check_response_headers(self, response):
self.assertTrue(not response.has_header("Warning"))
class test_dateformat_warn_asctime_rfc1123(ProxyTest):
"""
Test deletion of Warning: header with rfc1123 date and
Date: header with asctime date.
"""
def test_dateformat_warn_asctime_rfc1123(self):
self.start_test()
def get_response_headers(self, content):
now = time.time()
warndate = get_date_rfc1123(now - 5)
warning = '119 smee "hulla" "%s"' % warndate
date = get_date_asctime(now)
return [
"Content-Type: text/plain",
"Content-Length: %d" % len(content),
"Warning: %s" % warning,
"Date: %s" % date,
]
def check_response_headers(self, response):
self.assertTrue(not response.has_header("Warning"))
class test_dateformat_warn_asctime_rfc850(ProxyTest):
"""
Test deletion of Warning: header with rfc850 date and
Date: header with asctime date.
"""
def test_dateformat_warn_asctime_rfc850(self):
self.start_test()
def get_response_headers(self, content):
now = time.time()
warndate = get_date_rfc850(now - 5)
warning = '119 smee "hulla" "%s"' % warndate
date = get_date_asctime(now)
return [
"Content-Type: text/plain",
"Content-Length: %d" % len(content),
"Warning: %s" % warning,
"Date: %s" % date,
]
def check_response_headers(self, response):
self.assertTrue(not response.has_header("Warning"))
| gpl-2.0 | 2,906,512,642,459,607,600 | 28.694268 | 76 | 0.587516 | false |
AtteqCom/zsl | src/zsl/resource/resource_helper.py | 1 | 6542 | """
:mod:`zsl.resource.resource_helper`
-----------------------------------
.. moduleauthor:: Peter Morihladko
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import *
from future.utils import viewitems, viewvalues
from sqlalchemy import and_, asc, desc, or_
from sqlalchemy.orm import class_mapper, joinedload
def filter_from_url_arg(model_cls, query, arg, query_operator=and_,
arg_types=None):
"""
Parse filter URL argument ``arg`` and apply to ``query``
Example: 'column1<=value,column2==value' -> query.filter(Model.column1 <= value, Model.column2 == value)
"""
fields = arg.split(',')
mapper = class_mapper(model_cls)
if not arg_types:
arg_types = {}
exprs = []
joins = set()
for expr in fields:
if expr == "":
continue
e_mapper = mapper
e_model_cls = model_cls
operator = None
method = None
for op in operator_order:
if op in expr:
operator = op
method = operator_to_method[op]
break
if operator is None:
raise Exception('No operator in expression "{0}".'.format(expr))
(column_names, value) = expr.split(operator)
column_names = column_names.split('__')
value = value.strip()
for column_name in column_names:
if column_name in arg_types:
typed_value = arg_types[column_name](value)
else:
typed_value = value
if column_name in e_mapper.relationships:
joins.add(column_name)
e_model_cls = e_mapper.attrs[column_name].mapper.class_
e_mapper = class_mapper(e_model_cls)
if hasattr(e_model_cls, column_name):
column = getattr(e_model_cls, column_name)
exprs.append(getattr(column, method)(typed_value))
else:
raise Exception('Invalid property {0} in class {1}.'.format(column_name, e_model_cls))
exprs = _join_equal_columns_to_or(exprs)
return query.join(*joins).filter(query_operator(*exprs))
def _join_equal_columns_to_or(filter_expressions):
columns = list(map(lambda expr: expr.get_children()[0], filter_expressions))
if len(columns) == len(set(columns)):
return filter_expressions
joined_expressions = {}
for filter_expression in filter_expressions:
column = filter_expression.get_children()[0]
if column in joined_expressions:
joined_expressions[column].append(filter_expression)
else:
joined_expressions[column] = [filter_expression]
final_expressions = []
for _, expressions in joined_expressions.items():
if len(expressions) == 0:
continue
elif len(expressions) == 1:
final_expressions.append(expressions[0])
else:
final_expressions.append(or_(*expressions))
return final_expressions
operator_to_method = {
'::like::': 'like',
'==': '__eq__',
'<=': '__le__',
'>=': '__ge__',
'!=': '__ne__',
'<': '__lt__',
'>': '__gt__'
}
operator_order = ['::like::', '==', '<=', '>=', '!=', '<', '>']
def order_from_url_arg(model_cls, query, arg):
fields = arg.split(',')
mapper = class_mapper(model_cls)
orderings = []
joins = []
for field in fields:
e_mapper = mapper
e_model_cls = model_cls
if field[0] == '-':
column_names = field[1:]
direction = 'desc'
else:
column_names = field
direction = 'asc'
column_names = column_names.split('__')
for column_name in column_names:
if column_name in e_mapper.relationships:
joins.append(column_name)
e_model_cls = e_mapper.attrs[column_name].mapper.class_
e_mapper = class_mapper(e_model_cls)
if hasattr(e_model_cls, column_name):
column = getattr(e_model_cls, column_name)
order_by = asc(column) if direction == 'asc' else desc(column)
orderings.append(order_by)
else:
raise Exception('Invalid property {0} in class {1}.'.format(column_name, model_cls))
return query.join(*joins).order_by(*orderings)
def create_related_tree(fields):
tree = {}
for field in fields:
fs = field.split('__')
node = tree
for f in fs:
if f not in node:
node[f] = {}
node = node[f]
# replace empty {} with None, to represent leafs
q = [tree]
while len(q) > 0:
node = q.pop()
for k, v in viewitems(node):
if len(v) > 0:
q.append(v)
else:
node[k] = None
return tree
def model_tree(name, model_cls, visited=None):
"""Create a simple tree of model's properties and its related models.
    It traverses through relations, but ignores any loops.
    :param name: name of the model
    :type name: str
    :param model_cls: model class
    :param visited: set of visited models
    :type visited: set or None
    :return: a dictionary where values are lists of strings or other \
    dictionaries
"""
if not visited:
visited = set()
visited.add(model_cls)
mapper = class_mapper(model_cls)
columns = [column.key for column in mapper.column_attrs]
related = [model_tree(rel.key, rel.mapper.entity, visited)
for rel in mapper.relationships if rel.mapper.entity not in visited]
return {name: columns + related}
def flat_model(tree):
"""Flatten the tree into a list of properties adding parents as prefixes."""
names = []
for columns in viewvalues(tree):
for col in columns:
if isinstance(col, dict):
col_name = list(col)[0]
names += [col_name + '__' + c for c in flat_model(col)]
else:
names.append(col)
return names
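# Illustrative sketch (hypothetical field names, not part of the original
# module): a tree shaped like the output of model_tree, flattened back into
# double-underscore field names.
def _example_flat_model():  # pragma: no cover
    tree = {'book': ['id', 'title', {'author': ['name', 'email']}]}
    return flat_model(tree)  # ['id', 'title', 'author__name', 'author__email']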
def apply_related(model_cls, query, related_fields):
"""
"""
mapper = class_mapper(model_cls)
loads = []
    # TODO: so far only for
for field in related_fields:
if field in mapper.relationships:
loads.append(joinedload(getattr(model_cls, field)))
if loads:
query = query.options(*loads)
return query
def related_from_fields(fields):
return [field.rsplit('__', 1)[0] for field in fields if '__' in field]
| mit | 3,730,029,478,048,246,000 | 27.077253 | 108 | 0.567869 | false |
by46/simplekit | simplekit/url/url.py | 1 | 1887 | import six
from six.moves import urllib
import util
from .path import Path
from .query import Query
__author__ = 'benjamin.c.yan'
SAFE_SEGMENT_CHARS = ":@-._~!$&'()*+,;="
DEFAULT_PORTS = {
'ftp': 21,
'ssh': 22,
'http': 80,
'https': 443,
}
class URL(object):
def __init__(self, url):
self._host = self._port = None
self.username = self.password = None
self._scheme, self._netloc, path, query, fragment = urllib.parse.urlsplit(url)
self._path = Path(path)
self._query = Query(query)
self._fragment = fragment
if not self.port:
self._port = DEFAULT_PORTS.get(self._scheme)
@property
def path(self):
return self._path
@property
def query(self):
return self._query
@property
def scheme(self):
return self._scheme
@scheme.setter
def scheme(self, scheme):
if isinstance(scheme, six.string_types):
self._scheme = scheme.lower()
@property
def netloc(self):
return self._netloc
@netloc.setter
def netloc(self, value):
self._netloc = value
@property
def url(self):
return str(self)
@property
def host(self):
pass
@property
def port(self):
return self._port
@port.setter
def port(self, port):
if port is None:
self._port = DEFAULT_PORTS.get(self.scheme)
elif util.is_valid_port(port):
self._port = int(str(port))
else:
raise ValueError("Port is invalid port %s" % port)
@property
def fragment(self):
return self._fragment
def __str__(self):
url = urllib.parse.urlunsplit((
self.scheme,
self.netloc,
str(self.path),
self.query.encode(),
self.fragment
))
return url
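# Illustrative sketch (hypothetical address, not part of the original module);
# it assumes the accompanying Path and Query helpers parse as expected, and the
# port falls back to the scheme default (80 for http).
def _example_url():  # pragma: no cover
    url = URL('http://example.com/a/b?x=1#frag')
    return url.scheme, url.port, url.fragment, str(url)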
| mit | 973,136,617,269,780,100 | 19.966667 | 86 | 0.550609 | false |
p-bro/potfit | tests/kim/test_kim_list_model_params.py | 3 | 1207 | import pytest
POTENTIAL_FILE_CONTENT = '''#F 5 1
#T ex_model_Ar_P_LJ
#C Ar
#E
KIM_PARAM cutoff
8.150000 8.150000 8.150000
KIM_PARAM epsilon
0.010400 0.010400 0.010400
KIM_PARAM sigma
3.400000 3.400000 3.400000
'''
def test_kim_model_params_dump_file(potfit):
potfit.create_param_file(kim_model_name='ex_model_Ar_P_LJ', kim_model_params='dump_file')
potfit.create_potential_file('''
#F 5 1
#T ex_model_Ar_P_LJ
#C Ar
#E
''')
potfit.run()
assert(POTENTIAL_FILE_CONTENT in potfit.get_file_content('ex_model_Ar_P_LJ.default'))
assert(potfit.has_no_error())
def test_kim_model_params_dump(potfit):
potfit.create_param_file(kim_model_name='ex_model_Ar_P_LJ', kim_model_params='dump')
potfit.create_potential_file('''
#F 5 1
#T ex_model_Ar_P_LJ
#C Ar
#E
''')
potfit.run()
assert(POTENTIAL_FILE_CONTENT in potfit.stdout)
assert(potfit.has_no_error())
def test_kim_model_params_use_default(potfit):
potfit.create_param_file(kim_model_name='ex_model_Ar_P_LJ', kim_model_params='use_default')
potfit.create_potential_file('''
#F 5 1
#T ex_model_Ar_P_LJ
#C Ar
#E''')
potfit.create_config_file(elements=['Ar'])
potfit.run()
assert(potfit.has_no_error())
| gpl-2.0 | 2,641,530,948,671,059,500 | 22.666667 | 95 | 0.684341 | false |
scottw13/BET-1 | bet/sensitivity/gradients.py | 1 | 23932 | # Copyright (C) 2014-2015 The BET Development Team
"""
This module contains functions for approximating gradient vectors of QoI maps.
All methods that cluster points around centers are written to return the samples
in the following order : CENTERS, FOLLOWED BY THE CLUSTER AROUND THE FIRST
CENTER, THEN THE CLUSTER AROUND THE SECOND CENTER AND SO ON.
"""
import numpy as np
import scipy.spatial as spatial
import scipy.misc as misc
import bet.util as util
import sys
from itertools import combinations
def sample_linf_ball(centers, num_close, rvec, lam_domain=None):
r"""
    Pick num_close points in the l-infinity ball with side lengths 2*rvec around a
    point in :math:`\Lambda`; do this for each point in centers. If this box
extends outside of :math:`\Lambda`, we sample the intersection.
:param centers: Points in :math:`\Lambda` to cluster points around
:type centers: :class:`np.ndarray` of shape (num_centers, Lambda_dim)
:param int num_close: Number of points in each cluster
:param rvec: Each side of the box will have length 2*rvec[i]
:type rvec: :class:`np.ndarray` of shape (Lambda_dim,)
:param lam_domain: The domain of the parameter space
:type lam_domain: :class:`np.ndarray` of shape (Lambda_dim, 2)
:rtype: :class:`np.ndarray` of shape ((num_close+1)*num_centers, Lambda_dim)
:returns: Centers and clusters of samples near each center
"""
Lambda_dim = centers.shape[1]
num_centers = centers.shape[0]
rvec = util.fix_dimensions_vector(rvec)
#If no lam_domain, set domain large
if lam_domain is None:
lam_domain = np.zeros([Lambda_dim, 2])
lam_domain[:, 0] = -sys.float_info[0]
lam_domain[:, 1] = sys.float_info[0]
# Define bounds for each box
left = np.maximum(
centers - rvec, np.ones([num_centers, Lambda_dim]) * lam_domain[:, 0])
right = np.minimum(
centers + rvec, np.ones([num_centers, Lambda_dim]) * lam_domain[:, 1])
# Samples each box uniformly
samples = np.repeat(right - left, num_close, axis=0) * np.random.random(
[num_centers * num_close, Lambda_dim]) + np.repeat(left, num_close, \
axis=0)
return np.concatenate([centers, samples])
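# Illustrative sketch (hypothetical values, not part of the original BET
# source): the sampling box is clipped to the unit square when a center sits
# near the domain boundary.
def _example_linf_cluster():  # pragma: no cover
    lam_domain = np.array([[0.0, 1.0], [0.0, 1.0]])
    center = np.array([[0.95, 0.5]])
    # returns the center followed by 10 samples from the clipped box
    return sample_linf_ball(center, 10, np.array([0.1, 0.1]), lam_domain)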
def sample_l1_ball(centers, num_close, rvec):
r"""
Uniformly sample the l1-ball (defined by 2^dim simplices). Then scale
each dimension according to rvec and translate the center to centers.
Do this for each point in centers. *This method currently allows
samples to be placed outside of lam_domain. Please place your
centers accordingly.*
:param centers: Points in :math:`\Lambda` to cluster samples around
:type centers: :class:`np.ndarray` of shape (num_centers, Ldim)
:param int num_close: Number of samples in each l1 ball
:param rvec: The radius of the l1 ball, along each axis
:type rvec: :class:`np.ndarray` of shape (Lambda_dim)
:rtype: :class:`np.ndarray` of shape ((num_close+1)*num_centers, Lambda_dim)
:returns: Uniform random samples from an l1 ball around each center
"""
Lambda_dim = centers.shape[1]
rvec = util.fix_dimensions_vector(rvec)
samples = np.zeros([(num_close + 1) * centers.shape[0], centers.shape[1]])
samples[0:centers.shape[0], :] = centers
# We choose weighted random distance from the center for each new sample
random_dist = np.random.random([num_close, 1])
weight_vec = random_dist**(1. / Lambda_dim)
# For each center, randomly sample the l1_ball
for cen in range(centers.shape[0]):
# Begin by uniformly sampling the unit simplex in the first quadrant
# Choose Lambda_dim-1 reals uniformly between 0 and weight_vec for each
# new sample
random_mat = np.random.random([num_close, Lambda_dim - 1]) * \
np.tile(weight_vec, (1, Lambda_dim - 1))
# Sort the random_mat
random_mat = np.sort(random_mat, 1)
        # Construct weight_mat so that the first column is zeros, the next
# Lambda_dim-1 columns are the sorted reals between 0 and weight_vec,
# and the last column is weight_vec.
weight_mat = np.zeros([num_close, Lambda_dim + 1])
weight_mat[:, 1:Lambda_dim] = random_mat
weight_mat[:, Lambda_dim] = np.array(weight_vec).transpose()
# The differences between the Lambda_dim+1 columns will give us
# random points in the unit simplex of dimension Lambda_dim.
samples_cen = np.zeros([num_close, Lambda_dim])
for Ldim in range(Lambda_dim):
samples_cen[:, Ldim] = weight_mat[:, Ldim + 1] - weight_mat[:, Ldim]
# Assign a random sign to each element of each new sample
# This give us samples in the l1_ball, not just the unit simplex in
# the first quadrant
rand_sign = 2 * np.round(np.random.random([num_close, Lambda_dim])) - 1
samples_cen = samples_cen * rand_sign
# Scale each dimension according to rvec and translate to center
samples_cen = samples_cen * rvec + centers[cen, :]
# Append newsamples to samples
samples[centers.shape[0] + cen * num_close:centers.shape[0] + \
(cen + 1) * num_close, :] = samples_cen
return samples
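# Illustrative sketch (hypothetical values, not part of the original BET
# source): two centers in a 2-D parameter space, each surrounded by a
# scaled-l1-ball cluster of 10 samples.
def _example_l1_cluster():  # pragma: no cover
    centers = np.array([[0.3, 0.4], [0.7, 0.6]])
    # rows 0-1 are the centers, rows 2-11 cluster around the first center and
    # rows 12-21 around the second, matching the ordering described above
    return sample_l1_ball(centers, num_close=10, rvec=np.array([0.05, 0.05]))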
def pick_ffd_points(centers, rvec):
r"""
    Pick Lambda_dim points, for each center, for a forward finite
difference gradient approximation. The points are returned in the order:
centers, followed by the cluster around the first center, then the cluster
around the second center and so on.
:param centers: Points in :math:`\Lambda` the place stencil around
:type centers: :class:`np.ndarray` of shape (num_centers, Lambda_dim)
:param rvec: The radius of the stencil, along each axis
:type rvec: :class:`np.ndarray` of shape (Lambda_dim,)
:rtype: :class:`np.ndarray` of shape ((Lambda_dim+1)*num_centers,
Lambda_dim)
:returns: Samples for centered finite difference stencil for
each point in centers.
"""
Lambda_dim = centers.shape[1]
num_centers = centers.shape[0]
samples = np.repeat(centers, Lambda_dim, axis=0)
rvec = util.fix_dimensions_vector(rvec)
    # Construct a [num_centers*Lambda_dim, Lambda_dim] matrix that
# translates the centers to the FFD points.
translate = np.tile(np.eye(Lambda_dim) * rvec, (num_centers, 1))
samples = samples + translate
return np.concatenate([centers, samples])
def pick_cfd_points(centers, rvec):
r"""
Pick 2*Lambda_dim points, for each center, for centered finite difference
    gradient approximation. The centers are not needed for the CFD gradient
    approximation; they are returned for consistency with the other methods and
    because of the common need to have not just the gradient but also the QoI
    value at the centers in adaptive sampling algorithms. The points are returned
in the order: centers, followed by the cluster around the first center, then
the cluster around the second center and so on.
:param centers: Points in :math:`\Lambda` to cluster points around
:type centers: :class:`np.ndarray` of shape (num_centers, Lambda_dim)
:param rvec: The radius of the stencil, along each axis
:type rvec: :class:`np.ndarray` of shape (Lambda_dim,)
:rtype: :class:`np.ndarray` of shape ((2*Lambda_dim+1)*num_centers,
Lambda_dim)
:returns: Samples for centered finite difference stencil for
each point in centers.
"""
Lambda_dim = centers.shape[1]
num_centers = centers.shape[0]
samples = np.repeat(centers, 2 * Lambda_dim, axis=0)
rvec = util.fix_dimensions_vector(rvec)
    # Construct a [num_centers*2*Lambda_dim, Lambda_dim] matrix that
# translates the centers to the CFD points
ident = np.eye(Lambda_dim) * rvec
translate = np.tile(np.append(ident, -ident, axis=0), (num_centers, 1))
samples = samples + translate
return np.concatenate([centers, samples])
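# Illustrative sketch (hypothetical values, not part of the original BET
# source): the CFD stencil for one 2-D center; the result holds the center
# followed by the four stencil points center +/- rvec[i] * e_i.
def _example_cfd_stencil():  # pragma: no cover
    return pick_cfd_points(np.array([[0.5, 0.5]]), np.array([0.1, 0.1]))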
def radial_basis_function(r, kernel=None, ep=None):
"""
Evaluate a chosen radial basis function. Allow for the choice of several
radial basis functions to use in
:meth:~bet.sensitivity.gradients.calculate_gradients_rbf
:param r: Distances from the reference point
:type r: :class:`np.ndarray`
    :param string kernel: Choice of radial basis function. Default is C4Matern
:param float ep: Shape parameter for the radial basis function.
Default is 1.0
:rtype: :class:`np.ndarray` of shape (r.shape)
:returns: Radial basis function evaluated for each element of r
"""
if ep is None:
ep = 1.0
    if kernel is None or kernel == 'C4Matern':
        rbf = (1 + (ep * r) + (ep * r)**2 / 3) * np.exp(-ep * r)
    elif kernel == 'Gaussian':
        rbf = np.exp(-(ep * r)**2)
    elif kernel == 'Multiquadric':
        rbf = (1 + (ep * r)**2)**(0.5)
    elif kernel == 'InverseMultiquadric':
        rbf = 1 / ((1 + (ep * r)**2)**(0.5))
else:
raise ValueError("The kernel chosen is not currently available.")
return rbf
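# Illustrative sketch (hypothetical values, not part of the original BET
# source): evaluating two of the kernels above at a few distances with the
# default shape parameter ep = 1.0.
def _example_rbf_values():  # pragma: no cover
    r = np.array([0.0, 0.5, 1.0])
    return (radial_basis_function(r, 'Gaussian'),
            radial_basis_function(r, 'Multiquadric'))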
def radial_basis_function_dxi(r, xi, kernel=None, ep=None):
"""
Evaluate a partial derivative of a chosen radial basis function. Allow for
the choice of several radial basis functions to use in the
:meth:~bet.sensitivity.gradients.calculate_gradients_rbf.
:param r: Distances from the reference point
:type r: :class:`np.ndarray`
:param xi: Distances from the reference point in dimension i
:type xi: :class:`np.ndarray`
    :param string kernel: Choice of radial basis function. Default is C4Matern
:param float ep: Shape parameter for the radial basis function.
Default is 1.0
:rtype: :class:`np.ndarray` of shape (r.shape)
:returns: Radial basis function evaluated for each element of r
"""
if ep is None:
ep = 1.0
    if kernel is None or kernel == 'C4Matern':
        rbfdxi = -(ep**2 * xi * np.exp(-ep * r) * (ep * r + 1)) / 3
    elif kernel == 'Gaussian':
        rbfdxi = -2 * ep**2 * xi * np.exp(-(ep * r)**2)
    elif kernel == 'Multiquadric':
        rbfdxi = (ep**2 * xi) / ((1 + (ep * r)**2)**(0.5))
    elif kernel == 'InverseMultiquadric':
        rbfdxi = -(ep**2 * xi) / ((1 + (ep * r)**2)**(1.5))
else:
raise ValueError("The kernel chosen is not currently available")
return rbfdxi
def calculate_gradients_rbf(samples, data, centers=None, num_neighbors=None,
RBF=None, ep=None, normalize=True):
r"""
Approximate gradient vectors at ``num_centers, centers.shape[0]`` points
in the parameter space for each QoI map using a radial basis function
interpolation method.
:param samples: Samples for which the model has been solved.
:type samples: :class:`np.ndarray` of shape (num_samples, Lambda_dim)
:param data: QoI values corresponding to each sample.
:type data: :class:`np.ndarray` of shape (num_samples, Data_dim)
:param centers: Points in :math:`\Lambda` at which to approximate gradient
information.
:type centers: :class:`np.ndarray` of shape (num_exval, Lambda_dim)
:param int num_neighbors: Number of nearest neighbors to use in gradient
approximation. Default value is Lambda_dim + 2.
:param string RBF: Choice of radial basis function. Default is Gaussian
:param float ep: Choice of shape parameter for radial basis function.
Default value is 1.0
:param boolean normalize: If normalize is True, normalize each gradient
vector
:rtype: :class:`np.ndarray` of shape (num_samples, Data_dim, Lambda_dim)
:returns: Tensor representation of the gradient vectors of each
QoI map at each point in centers
"""
data = util.fix_dimensions_vector_2darray(util.clean_data(data))
Lambda_dim = samples.shape[1]
num_model_samples = samples.shape[0]
Data_dim = data.shape[1]
if num_neighbors is None:
num_neighbors = Lambda_dim + 2
if ep is None:
ep = 1.0
if RBF is None:
RBF = 'Gaussian'
# If centers is None we assume the user chose clusters of size
# Lambda_dim + 2
if centers is None:
num_centers = num_model_samples / (Lambda_dim + 2)
centers = samples[:num_centers]
else:
num_centers = centers.shape[0]
rbf_tensor = np.zeros([num_centers, num_model_samples, Lambda_dim])
gradient_tensor = np.zeros([num_centers, Data_dim, Lambda_dim])
tree = spatial.KDTree(samples)
# For each centers, interpolate the data using the rbf chosen and
# then evaluate the partial derivative of that rbf at the desired point.
for c in range(num_centers):
# Find the k nearest neighbors and their distances to centers[c,:]
[r, nearest] = tree.query(centers[c, :], k=num_neighbors)
r = np.tile(r, (Lambda_dim, 1))
        # Compute the componentwise differences from the center to each neighbor
diffVec = (centers[c, :] - samples[nearest, :]).transpose()
# Compute the l2 distances between pairs of nearest neighbors
distMat = spatial.distance_matrix(
samples[nearest, :], samples[nearest, :])
# Solve for the rbf weights using interpolation conditions and
# evaluate the partial derivatives
rbf_mat_values = \
np.linalg.solve(radial_basis_function(distMat, RBF),
radial_basis_function_dxi(r, diffVec, RBF, ep) \
.transpose()).transpose()
# Construct the finite difference matrices
rbf_tensor[c, nearest, :] = rbf_mat_values.transpose()
gradient_tensor = rbf_tensor.transpose(2, 0, 1).dot(data).transpose(1, 2, 0)
if normalize:
# Compute the norm of each vector
norm_gradient_tensor = np.linalg.norm(gradient_tensor, ord=1, axis=2)
# If it is a zero vector (has 0 norm), set norm=1, avoid divide by zero
norm_gradient_tensor[norm_gradient_tensor == 0] = 1.0
# Normalize each gradient vector
gradient_tensor = gradient_tensor/np.tile(norm_gradient_tensor,
(Lambda_dim, 1, 1)).transpose(1, 2, 0)
return gradient_tensor
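# Illustrative sketch (hypothetical values, not part of the original BET
# source): RBF gradient approximation on clusters from sample_l1_ball for the
# linear map Q(lam) = lam_0 + 2*lam_1, whose exact (unnormalized) gradient is
# (1, 2) everywhere.
def _example_rbf_gradients():  # pragma: no cover
    centers = np.array([[0.25, 0.25], [0.75, 0.75]])
    samples = sample_l1_ball(centers, num_close=4, rvec=np.array([0.05, 0.05]))
    data = samples[:, 0] + 2 * samples[:, 1]
    return calculate_gradients_rbf(samples, data, centers=centers)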
def calculate_gradients_ffd(samples, data, normalize=True):
"""
Approximate gradient vectors at ``num_centers, centers.shape[0]`` points
in the parameter space for each QoI map. THIS METHOD IS DEPENDENT ON USING
:meth:~bet.sensitivity.gradients.pick_ffd_points TO CHOOSE SAMPLES FOR THE
FFD STENCIL AROUND EACH CENTER. THE ORDERING MATTERS.
:param samples: Samples for which the model has been solved.
:type samples: :class:`np.ndarray` of shape (num_samples, Lambda_dim)
:param data: QoI values corresponding to each sample.
:type data: :class:`np.ndarray` of shape (num_samples, Data_dim)
:param boolean normalize: If normalize is True, normalize each gradient
vector
:rtype: :class:`np.ndarray` of shape (num_samples, Data_dim, Lambda_dim)
:returns: Tensor representation of the gradient vectors of each
QoI map at each point in centers
"""
num_model_samples = samples.shape[0]
Lambda_dim = samples.shape[1]
num_centers = num_model_samples / (Lambda_dim + 1)
# Find rvec from the first cluster of samples
rvec = samples[num_centers:num_centers + Lambda_dim, :] - samples[0, :]
rvec = util.fix_dimensions_vector_2darray(rvec.diagonal())
# Clean the data
data = util.fix_dimensions_vector_2darray(util.clean_data(data))
num_qois = data.shape[1]
gradient_tensor = np.zeros([num_centers, num_qois, Lambda_dim])
rvec = np.tile(np.repeat(rvec, num_qois, axis=1), [num_centers, 1])
# Compute the gradient vectors using the standard FFD stencil
gradient_mat = (data[num_centers:] - np.repeat(data[0:num_centers], \
Lambda_dim, axis=0)) * (1. / rvec)
# Reshape and organize
gradient_tensor = np.reshape(gradient_mat.transpose(), [num_qois,
Lambda_dim, num_centers], order='F').transpose(2, 0, 1)
if normalize:
# Compute the norm of each vector
norm_gradient_tensor = np.linalg.norm(gradient_tensor, ord=1, axis=2)
# If it is a zero vector (has 0 norm), set norm=1, avoid divide by zero
norm_gradient_tensor[norm_gradient_tensor == 0] = 1.0
# Normalize each gradient vector
gradient_tensor = gradient_tensor/np.tile(norm_gradient_tensor,
(Lambda_dim, 1, 1)).transpose(1, 2, 0)
return gradient_tensor
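# Hedged usage sketch (not part of the original module): approximating the
# gradients of a linear QoI map with the FFD stencil.  pick_ffd_points is the
# companion sampler referenced in the docstring above; its (centers, rvec)
# signature is assumed here.
def _demo_gradients_ffd():
    Lambda_dim = 2
    centers = np.random.uniform(0.0, 1.0, [10, Lambda_dim])
    samples = pick_ffd_points(centers, 0.01 * np.ones(Lambda_dim))
    qoi_map = np.array([[2.0, 0.0], [1.0, -1.0]])  # two linear QoIs
    data = samples.dot(qoi_map.T)
    grads = calculate_gradients_ffd(samples, data, normalize=False)
    # grads has shape (10, 2, Lambda_dim); each slice reproduces qoi_map up to
    # floating point because the map is linear
    return grads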
def calculate_gradients_cfd(samples, data, normalize=True):
"""
    Approximate gradient vectors at ``num_centers`` points
in the parameter space for each QoI map. THIS METHOD IS DEPENDENT
ON USING :meth:~bet.sensitivity.pick_cfd_points TO CHOOSE SAMPLES FOR THE
CFD STENCIL AROUND EACH CENTER. THE ORDERING MATTERS.
:param samples: Samples for which the model has been solved.
:type samples: :class:`np.ndarray` of shape
        ((2*Lambda_dim + 1)*num_centers, Lambda_dim)
:param data: QoI values corresponding to each sample.
:type data: :class:`np.ndarray` of shape (num_samples, Data_dim)
:param boolean normalize: If normalize is True, normalize each gradient
vector
    :rtype: :class:`np.ndarray` of shape (num_centers, Data_dim, Lambda_dim)
:returns: Tensor representation of the gradient vectors of each
QoI map at each point in centers
"""
num_model_samples = samples.shape[0]
Lambda_dim = samples.shape[1]
num_centers = num_model_samples / (2*Lambda_dim + 1)
# Find rvec from the first cluster of samples
rvec = samples[num_centers:num_centers + Lambda_dim, :] - samples[0, :]
rvec = util.fix_dimensions_vector_2darray(rvec.diagonal())
# Clean the data
data = util.fix_dimensions_vector_2darray(util.clean_data(
data[num_centers:]))
num_qois = data.shape[1]
gradient_tensor = np.zeros([num_centers, num_qois, Lambda_dim])
rvec = np.tile(np.repeat(rvec, num_qois, axis=1), [num_centers, 1])
    # Construct indices for the CFD gradient approximation
inds = np.repeat(range(0, 2 * Lambda_dim * num_centers, 2 * Lambda_dim),
Lambda_dim) + np.tile(range(0, Lambda_dim), num_centers)
inds = np.array([inds, inds+Lambda_dim]).transpose()
gradient_mat = (data[inds[:, 0]] - data[inds[:, 1]]) * (0.5 / rvec)
# Reshape and organize
gradient_tensor = np.reshape(gradient_mat.transpose(), [num_qois,
Lambda_dim, num_centers], order='F').transpose(2, 0, 1)
if normalize:
# Compute the norm of each vector
norm_gradient_tensor = np.linalg.norm(gradient_tensor, ord=1, axis=2)
# If it is a zero vector (has 0 norm), set norm=1, avoid divide by zero
norm_gradient_tensor[norm_gradient_tensor == 0] = 1.0
# Normalize each gradient vector
gradient_tensor = gradient_tensor/np.tile(norm_gradient_tensor,
(Lambda_dim, 1, 1)).transpose(1, 2, 0)
return gradient_tensor
###############################################################
# Hessian Methods Below
###############################################################
def pick_hessian_points(centers, rvec):
r"""
THESE ARE ORDERED : CENTERS, THEN CLUSTER AROUND FIRST CENTER, CLUSTER AROUND
SECOND CENTER AND SO ON.
    INSIDE EACH CLUSTER, THEY ARE ORDERED: the mixed-partial points
    x1x2, x1x3, ..., x2x3, ..., x(n-1)xn, followed by the pure
    second-derivative points x1x1, x2x2, ..., xnxn.
:param centers: Points in :math:`\mathcal{Q}` to cluster points around
:type centers: :class:`np.ndarray` of shape (num_centers, Q_dim)
:param rvec: The radius of the stencil, along each axis
:type rvec: :class:`np.ndarray` of shape (Q_dim,)
:rtype: :class:`np.ndarray` of shape (((Q_dimCHOOSE2 + Q_dim)*4 + 1) * num_centers,
        Q_dim)
:returns: Samples for centered finite difference stencil for
each point in centers.
"""
Q_dim = centers.shape[1]
num_centers = centers.shape[0]
num_stencilpts = (int(round(misc.comb(Q_dim, 2))) + Q_dim) * 4
samples = np.repeat(centers, num_stencilpts, axis=0)
rvec = util.fix_dimensions_vector(rvec)
stencil = np.zeros([num_stencilpts, Q_dim])
onesx = np.array([1, 1, -1, -1])
onesy = np.array([1, -1, 1, -1])
stencxx = np.array([1, 2, -1, -2])
combs = np.array(list(combinations(range(Q_dim), 2)))
for i in range(combs.shape[0]):
stencil[4 * i:4 * (i + 1), combs[i, 0]] = onesx
stencil[4 * i:4 * (i + 1), combs[i, 1]] = onesy
for i in range(combs.shape[0], combs.shape[0] + Q_dim):
stencil[4 * i:4 * (i + 1), i - combs.shape[0]] = stencxx
    # Construct a [(Q_dimCHOOSE2 + Q_dim)*4 * num_centers, Q_dim] matrix that
    # translates the centers to the Hessian stencil points
translate = np.tile(stencil, (num_centers, 1)) * rvec
samples = samples + translate
return np.concatenate([centers, samples])
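# Hedged usage sketch (not part of the original module): for Q_dim = 2 each
# cluster holds 4 mixed-partial points at (+-r, +-r) followed by 4 points per
# axis at +-r and +-2r, i.e. 12 stencil points per center.
def _demo_hessian_stencil():
    centers = np.array([[0.5, 0.5]])
    samples = pick_hessian_points(centers, np.array([0.1, 0.1]))
    # samples[0] is the center itself; samples[1:13] are its stencil points
    return samples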
def calculate_hessian(samples, data):
"""
:param samples: Samples for which the model has been solved.
:type samples: :class:`np.ndarray` of shape
(((Q_dimCHOOSE2 + Q_dim)*4 + 1) * num_centers, Q_dim)
:param data: Data values corresponding to each sample.
:type data: :class:`np.ndarray` of shape (num_samples, 1)
    :rtype: :class:`np.ndarray` of shape (num_centers, Q_dim, Q_dim)
:returns: Tensor representation of the Hessian at each center
"""
num_samples = samples.shape[0]
Q_dim = samples.shape[1]
num_samples_percenter = (int(round(misc.comb(Q_dim, 2))) + Q_dim) * 4 + 1
num_centers = num_samples / num_samples_percenter
num_partials = int(round(misc.comb(Q_dim, 2)))
combs = np.array(list(combinations(range(Q_dim), 2)))
# Find rvec from the first cluster of samples
rvec = np.linalg.norm(samples[num_centers , :] - samples[num_centers + 1, :]) / 2.
rvec = util.fix_dimensions_vector_2darray(rvec)
    # Organize the data corresponding to the second derivatives and mixed partials
hessian_tensor = list()
for c in range(num_centers):
        inds_temp_data = np.append(
            np.array([c]),
            np.array(range(num_centers + c * (num_samples_percenter - 1),
                           num_centers + (c + 1) * (num_samples_percenter - 1))))
data_temp = data[inds_temp_data]
scnd_inds = np.append(np.array([0]), np.array(range(1 + num_partials * 4, 1 + num_partials * 4 + Q_dim * 4)))
partial_inds = range(1, num_partials * 4 + 1)
data_scnd = util.fix_dimensions_vector_2darray(util.clean_data(
data_temp[scnd_inds]))
data_partials = util.fix_dimensions_vector_2darray(util.clean_data(
data_temp[partial_inds]))
        # Construct indices for the mixed-partial CFD approximation
inds = np.array(range(0, 4 * num_partials)).reshape(num_partials, 4)
        hessian_vec_partials = (data_partials[inds[:, 0]] -
                                data_partials[inds[:, 1]] -
                                data_partials[inds[:, 2]] +
                                data_partials[inds[:, 3]]) / (4 * rvec ** 2)
inds = np.array(range(1, Q_dim * 4 + 1)).reshape(Q_dim, 4)
inds = np.append(np.zeros([Q_dim, 1], dtype=int), inds, axis=1)
        hessian_vec_scnds = (-30 * data_scnd[inds[:, 0]] +
                             16 * data_scnd[inds[:, 1]] - data_scnd[inds[:, 2]] +
                             16 * data_scnd[inds[:, 3]] -
                             data_scnd[inds[:, 4]]) / (12 * rvec ** 2)
hessian_mat = np.diag(hessian_vec_scnds[:, 0])
# Fill the upper and lower triangles with the partials
hessian_mat[combs[:,0], combs[:, 1]] = hessian_vec_partials[:, 0]
hessian_mat[combs[:,1], combs[:, 0]] = hessian_vec_partials[:, 0]
        # Add the current hessian_mat to hessian_tensor (currently a list that
        # is converted to an array before returning)
hessian_tensor.append(hessian_mat)
return np.array(hessian_tensor)
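# Hedged usage sketch (not part of the original module): estimating the
# Hessian of the quadratic map q(x) = x1**2 + 3*x1*x2, whose exact Hessian is
# [[2, 3], [3, 0]] everywhere.
def _demo_hessian():
    centers = np.random.uniform(0.2, 0.8, [5, 2])
    samples = pick_hessian_points(centers, np.array([0.01, 0.01]))
    data = samples[:, 0] ** 2 + 3.0 * samples[:, 0] * samples[:, 1]
    hessians = calculate_hessian(samples, data.reshape(-1, 1))
    # hessians has shape (5, 2, 2); each slice should be close to [[2, 3], [3, 0]]
    return hessians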
| gpl-3.0 | 8,716,650,759,643,292,000 | 41.282686 | 182 | 0.648504 | false |
pscedu/slash2-stable | zfs-fuse/contrib/solaris/fixfiles.py | 1 | 1775 | #!/usr/bin/env python
import sys, os
from stat import *
def remove_comments(fin, fout):
sin = fin.read()
sout = ''
while 1:
# Here we are at the start of a line
i = sin.find('\n')
if i == -1:
# File ended
sout += sin
break
line = sin[:i]
ignore_cchar = False
if line.lstrip().startswith('#'):
# The line is a preprocessor directive, the comment character is ignored
ignore_cchar = True
i2 = line.find('/')
if i2 == -1:
# Add the whole line
sout += sin[:i + 1]
sin = sin[i + 1:]
continue
# Check if it's a /* token
if (i2 + 1) < len(line) and line[i2 + 1] == '*':
i3 = sin.find('*/', i2 + 2)
if i3 == -1:
# File ended
sout += sin
break
# Add everything until the end of the comment
sout += sin[:i3 + 2]
sin = sin[i3 + 2:]
continue
if not ignore_cchar:
# This line has a valid comment character
sout += line[:i2].rstrip() + '\n'
else:
# Otherwise, add the whole line
sout += sin[:i + 1]
sin = sin[i + 1:]
fout.write(sout)
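# Hedged usage sketch (not part of the original script): exercising
# remove_comments on an in-memory buffer; the C snippet is illustrative only.
def _demo_remove_comments():
    from StringIO import StringIO
    src = StringIO('#include <stdio.h> /* keep me */\n'
                   'int x = 1; // dropped\n')
    out = StringIO()
    remove_comments(src, out)
    # out now holds the source with the //-style comment stripped and the
    # /* */ comment preserved
    return out.getvalue()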
def remove_pragmas(fin, fout):
s = fin.readline()
while s != '':
for pragma in ('ident', 'rarely_called'):
if s.startswith('#pragma ' + pragma):
s = '\n'
break
fout.write(s)
s = fin.readline()
def replace_file(finname, fun):
mode = os.lstat(finname)[ST_MODE]
if not S_ISREG(mode):
return
foutname = finname + '.bak'
fin = file(finname, 'r')
fout = file(foutname, 'w')
fun(fin, fout)
fin.close()
fout.close()
os.rename(foutname, finname)
for root, dirs, files in os.walk(sys.argv[1]):
for filename in files:
name = os.path.join(root, filename)
ext = os.path.splitext(name)[1]
if ext in ('.c', '.h'):
replace_file(name, remove_pragmas)
elif ext == '.S':
replace_file(name, remove_comments)
| isc | 2,800,599,929,985,834,500 | 17.882979 | 75 | 0.589296 | false |
robwebset/script.ebooks | resources/lib/mobi/lz77.py | 5 | 3054 | import struct
# ported directly from the PalmDoc Perl library
# http://kobesearch.cpan.org/htdocs/EBook-Tools/EBook/Tools/PalmDoc.pm.html
def uncompress_lz77(data):
length = len(data);
offset = 0; # Current offset into data
# char; # Character being examined
# ord; # Ordinal of $char
# lz77; # 16-bit Lempel-Ziv 77 length-offset pair
# lz77offset; # LZ77 offset
# lz77length; # LZ77 length
# lz77pos; # Position inside $lz77length
text = ''; # Output (uncompressed) text
# textlength; # Length of uncompressed text during LZ77 pass
# textpos; # Position inside $text during LZ77 pass
while offset < length:
# char = substr($data,$offset++,1);
char = data[offset];
offset += 1;
ord_ = ord(char);
# print " ".join([repr(char), hex(ord_)])
# The long if-elsif chain is the best logic for $ord handling
## no critic (Cascading if-elsif chain)
if (ord_ == 0):
# Nulls are literal
text += char;
elif (ord_ <= 8):
# Next $ord bytes are literal
text += data[offset:offset+ord_] # text .=substr($data,$offset,ord);
offset += ord_;
elif (ord_ <= 0x7f):
# Values from 0x09 through 0x7f are literal
text += char;
elif (ord_ <= 0xbf):
# Data is LZ77-compressed
# From Wikipedia:
# "A length-distance pair is always encoded by a two-byte
# sequence. Of the 16 bits that make up these two bytes,
# 11 bits go to encoding the distance, 3 go to encoding
# the length, and the remaining two are used to make sure
# the decoder can identify the first byte as the beginning
# of such a two-byte sequence."
offset += 1;
if (offset > len(data)):
print("WARNING: offset to LZ77 bits is outside of the data: %d" % offset);
return text;
lz77, = struct.unpack('>H', data[offset-2:offset])
# Leftmost two bits are ID bits and need to be dropped
lz77 &= 0x3fff;
# Length is rightmost 3 bits + 3
lz77length = (lz77 & 0x0007) + 3;
# Remaining 11 bits are offset
lz77offset = lz77 >> 3;
if (lz77offset < 1):
print("WARNING: LZ77 decompression offset is invalid!");
return text;
# Getting text from the offset is a little tricky, because
# in theory you can be referring to characters you haven't
# actually decompressed yet. You therefore have to check
# the reference one character at a time.
textlength = len(text);
for lz77pos in range(lz77length): # for($lz77pos = 0; $lz77pos < $lz77length; $lz77pos++)
textpos = textlength - lz77offset;
if (textpos < 0):
print("WARNING: LZ77 decompression reference is before"+
" beginning of text! %x" % lz77);
return;
text += text[textpos:textpos+1]; #text .= substr($text,$textpos,1);
textlength+=1;
else:
# 0xc0 - 0xff are single characters (XOR 0x80) preceded by
# a space
text += ' ' + chr(ord_ ^ 0x80);
return text;
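# Hedged usage sketch (not part of the original module): the byte values below
# are illustrative.  Bytes 0x09-0x7f pass through literally and 0xc0-0xff
# expand to a space plus the character XOR 0x80.
def _demo_uncompress():
    compressed = 'Hi' + chr(0xc1)  # 0xc1 -> ' ' + chr(0x41), i.e. ' A'
    return uncompress_lz77(compressed)  # -> 'Hi A'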
| gpl-2.0 | 3,504,326,720,525,281,300 | 34.511628 | 95 | 0.611657 | false |
zenlambda/pip | tests/unit/test_cmdoptions.py | 40 | 1970 | import pip
from pip.basecommand import Command
from pip import cmdoptions
class SimpleCommand(Command):
name = 'fake'
summary = name
def __init__(self):
super(SimpleCommand, self).__init__()
self.cmd_opts.add_option(cmdoptions.no_use_wheel())
self.cmd_opts.add_option(cmdoptions.no_binary())
self.cmd_opts.add_option(cmdoptions.only_binary())
def run(self, options, args):
cmdoptions.resolve_wheel_no_use_binary(options)
self.options = options
def test_no_use_wheel_sets_no_binary_all():
cmd = SimpleCommand()
cmd.main(['fake', '--no-use-wheel'])
expected = pip.index.FormatControl(set([':all:']), set([]))
assert cmd.options.format_control == expected
def test_no_binary_overrides():
cmd = SimpleCommand()
cmd.main(['fake', '--only-binary=:all:', '--no-binary=fred'])
expected = pip.index.FormatControl(set(['fred']), set([':all:']))
assert cmd.options.format_control == expected
def test_only_binary_overrides():
cmd = SimpleCommand()
cmd.main(['fake', '--no-binary=:all:', '--only-binary=fred'])
expected = pip.index.FormatControl(set([':all:']), set(['fred']))
assert cmd.options.format_control == expected
def test_none_resets():
cmd = SimpleCommand()
cmd.main(['fake', '--no-binary=:all:', '--no-binary=:none:'])
expected = pip.index.FormatControl(set([]), set([]))
assert cmd.options.format_control == expected
def test_none_preserves_other_side():
cmd = SimpleCommand()
cmd.main(
['fake', '--no-binary=:all:', '--only-binary=fred',
'--no-binary=:none:'])
expected = pip.index.FormatControl(set([]), set(['fred']))
assert cmd.options.format_control == expected
def test_comma_separated_values():
cmd = SimpleCommand()
cmd.main(['fake', '--no-binary=1,2,3'])
expected = pip.index.FormatControl(set(['1', '2', '3']), set([]))
assert cmd.options.format_control == expected
| mit | 8,077,742,803,930,349,000 | 30.774194 | 69 | 0.63198 | false |
yuxiaobu/nansat | mappers/mapper_generic.py | 1 | 11990 | # Name: mapper_generic.py
# Purpose: Generic Mapper for L3/L4 satellite or modeling data
# Authors: Asuka Yamakava, Anton Korosov, Morten Wergeland Hansen
# Licence: This file is part of NANSAT. You can redistribute it or modify
# under the terms of GNU General Public License, v.3
# http://www.gnu.org/licenses/gpl-3.0.html
#import os
from vrt import *
from nansat_tools import Node, latlongSRS
import numpy as np
class Mapper(VRT):
def __init__(self, fileName, gdalDataset, gdalMetadata, logLevel=30,
rmMetadatas=['NETCDF_VARNAME', '_Unsigned',
'ScaleRatio', 'ScaleOffset', 'dods_variable'],
**kwargs):
# Remove 'NC_GLOBAL#' and 'GDAL_' and 'NANSAT_' from keys in gdalDataset
tmpGdalMetadata = {}
geoMetadata = {}
origin_is_nansat = False
for key in gdalMetadata.keys():
newKey = key.replace('NC_GLOBAL#', '').replace('GDAL_', '')
if 'NANSAT_' in newKey:
geoMetadata[newKey.replace('NANSAT_', '')] = gdalMetadata[key]
origin_is_nansat = True
else:
tmpGdalMetadata[newKey] = gdalMetadata[key]
gdalMetadata = tmpGdalMetadata
fileExt = os.path.splitext(fileName)[1]
# Get file names from dataset or subdataset
subDatasets = gdalDataset.GetSubDatasets()
if len(subDatasets) == 0:
fileNames = [fileName]
else:
fileNames = [f[0] for f in subDatasets]
# add bands with metadata and corresponding values to the empty VRT
metaDict = []
geoFileDict = {}
xDatasetSource = ''
yDatasetSource = ''
firstXSize = 0
firstYSize = 0
for i, fileName in enumerate(fileNames):
subDataset = gdal.Open(fileName)
            # choose the first dataset with a grid
if (firstXSize == 0 and firstYSize == 0 and
subDataset.RasterXSize > 1 and subDataset.RasterYSize > 1):
firstXSize = subDataset.RasterXSize
firstYSize = subDataset.RasterYSize
firstSubDataset = subDataset
# get projection from the first subDataset
projection = firstSubDataset.GetProjection()
# take bands whose sizes are same as the first band.
if (subDataset.RasterXSize == firstXSize and
subDataset.RasterYSize == firstYSize):
if projection == '':
projection = subDataset.GetProjection()
if ('GEOLOCATION_X_DATASET' in fileName or
'longitude' in fileName):
xDatasetSource = fileName
elif ('GEOLOCATION_Y_DATASET' in fileName or
'latitude' in fileName):
yDatasetSource = fileName
else:
for iBand in range(subDataset.RasterCount):
subBand = subDataset.GetRasterBand(iBand+1)
bandMetadata = subBand.GetMetadata_Dict()
if 'PixelFunctionType' in bandMetadata:
bandMetadata.pop('PixelFunctionType')
sourceBands = iBand + 1
#sourceBands = i*subDataset.RasterCount + iBand + 1
# generate src metadata
src = {'SourceFilename': fileName,
'SourceBand': sourceBands}
# set scale ratio and scale offset
scaleRatio = bandMetadata.get(
'ScaleRatio',
bandMetadata.get(
'scale',
bandMetadata.get('scale_factor', '')))
if len(scaleRatio) > 0:
src['ScaleRatio'] = scaleRatio
scaleOffset = bandMetadata.get(
'ScaleOffset',
bandMetadata.get(
'offset',
bandMetadata.get(
'add_offset', '')))
if len(scaleOffset) > 0:
src['ScaleOffset'] = scaleOffset
                    # set DataType
src['DataType'] = subBand.DataType
# generate dst metadata
# get all metadata from input band
dst = bandMetadata
# set wkv and bandname
dst['wkv'] = bandMetadata.get('standard_name', '')
bandName = bandMetadata.get('NETCDF_VARNAME', '') # could we also use bandMetadata.get('name')?
if len(bandName) == 0:
bandName = bandMetadata.get('dods_variable', '')
if len(bandName) > 0:
if origin_is_nansat and fileExt == '.nc':
# remove digits added by gdal in exporting to
# netcdf...
if bandName[-1:].isdigit():
bandName = bandName[:-1]
if bandName[-1:].isdigit():
bandName = bandName[:-1]
dst['name'] = bandName
else:
dst['name'] = bandName
# remove non-necessary metadata from dst
for rmMetadata in rmMetadatas:
if rmMetadata in dst:
dst.pop(rmMetadata)
# append band with src and dst dictionaries
metaDict.append({'src': src, 'dst': dst})
# create empty VRT dataset with geolocation only
VRT.__init__(self, firstSubDataset, srcMetadata=gdalMetadata)
# add bands with metadata and corresponding values to the empty VRT
self._create_bands(metaDict)
# Create complex data bands from 'xxx_real' and 'xxx_imag' bands
# using pixelfunctions
rmBands = []
for iBandNo in range(self.dataset.RasterCount):
iBand = self.dataset.GetRasterBand(iBandNo + 1)
iBandName = iBand.GetMetadataItem('name')
# find real data band
if iBandName.find("_real") != -1:
realBandNo = iBandNo
realBand = self.dataset.GetRasterBand(realBandNo + 1)
realDtype = realBand.GetMetadataItem('DataType')
bandName = iBandName.replace(iBandName.split('_')[-1],
'')[0:-1]
for jBandNo in range(self.dataset.RasterCount):
jBand = self.dataset.GetRasterBand(jBandNo + 1)
jBandName = jBand.GetMetadataItem('name')
# find an imaginary data band corresponding to the real
# data band and create complex data band from the bands
if jBandName.find(bandName+'_imag') != -1:
imagBandNo = jBandNo
imagBand = self.dataset.GetRasterBand(imagBandNo + 1)
imagDtype = imagBand.GetMetadataItem('DataType')
dst = imagBand.GetMetadata()
dst['name'] = bandName
dst['PixelFunctionType'] = 'ComplexData'
dst['dataType'] = 10
src = [{'SourceFilename': fileNames[realBandNo],
'SourceBand': 1,
'DataType': realDtype},
{'SourceFilename': fileNames[imagBandNo],
'SourceBand': 1,
'DataType': imagDtype}]
self._create_band(src, dst)
self.dataset.FlushCache()
rmBands.append(realBandNo + 1)
rmBands.append(imagBandNo + 1)
# Delete real and imaginary bands
if len(rmBands) != 0:
self.delete_bands(rmBands)
if len(projection) == 0:
# projection was not set automatically
# get projection from GCPProjection
projection = geoMetadata.get('GCPProjection', '')
if len(projection) == 0:
# no projection was found in dataset or metadata:
# generate WGS84 by default
projection = latlongSRS.ExportToWkt()
# set projection
self.dataset.SetProjection(self.repare_projection(projection))
# check if GCPs were added from input dataset
gcpCount = firstSubDataset.GetGCPCount()
if gcpCount == 0:
# if no GCPs in input dataset: try to add GCPs from metadata
gcpCount = self.add_gcps_from_metadata(geoMetadata)
# Find proper bands and insert GEOLOCATION ARRAY into dataset
if len(xDatasetSource) > 0 and len(yDatasetSource) > 0:
self.add_geolocationArray(GeolocationArray(xDatasetSource,
yDatasetSource))
elif gcpCount == 0:
# if no GCPs found and not GEOLOCATION ARRAY set:
# Set Nansat Geotransform if it is not set automatically
geoTransform = self.dataset.GetGeoTransform()
if len(geoTransform) == 0:
geoTransformStr = geoMetadata.get('GeoTransform',
'(0|1|0|0|0|0|1)')
geoTransform = eval(geoTransformStr.replace('|', ','))
self.dataset.SetGeoTransform(geoTransform)
if 'start_date' in gdalMetadata:
self._set_time(parse(gdalMetadata['start_date']))
def repare_projection(self, projection):
'''Replace odd symbols in projection string '|' => ','; '&' => '"' '''
return projection.replace("|", ",").replace("&", '"')
def add_gcps_from_metadata(self, geoMetadata):
'''Get GCPs from strings in metadata and insert in dataset'''
gcpNames = ['GCPPixel', 'GCPLine', 'GCPX', 'GCPY']
gcpAllValues = []
# for all gcp coordinates
for i, gcpName in enumerate(gcpNames):
            # scan through the metadata and find how many lines there are for each GCP
gcpLineCount = 0
for metaDataItem in geoMetadata:
if gcpName in metaDataItem:
gcpLineCount += 1
# concat all lines
gcpString = ''
for n in range(0, gcpLineCount):
gcpLineName = '%s_%03d' % (gcpName, n)
gcpString += geoMetadata[gcpLineName]
# convert strings to floats
gcpString = gcpString.strip().replace(' ', '')
gcpValues = []
# append all gcps from string
for x in gcpString.split('|'):
if len(x) > 0:
gcpValues.append(float(x))
#gcpValues = [float(x) for x in gcpString.strip().split('|')]
gcpAllValues.append(gcpValues)
# create list of GDAL GCPs
gcps = []
for i in range(0, len(gcpAllValues[0])):
gcps.append(gdal.GCP(gcpAllValues[2][i], gcpAllValues[3][i], 0,
gcpAllValues[0][i], gcpAllValues[1][i]))
if len(gcps) > 0:
            # get GCP projection and repair it
projection = self.repare_projection(geoMetadata.
get('GCPProjection', ''))
# add GCPs to dataset
self.dataset.SetGCPs(gcps, projection)
self._remove_geotransform()
return len(gcps)
| gpl-3.0 | -2,212,177,753,891,639,300 | 46.019608 | 120 | 0.502002 | false |
pombredanne/zero-install | zeroinstall/__init__.py | 1 | 1476 | """
The Python implementation of the Zero Install injector is divided into four sub-packages:
- L{zeroinstall.injector} contains most of the interesting stuff for managing feeds, keys and downloads and for selecting versions
- L{zeroinstall.zerostore} contains low-level code for handling the implementation cache (where unpacked packages are stored)
- L{zeroinstall.gtkui} contains code for making GTK user-interfaces
- L{zeroinstall.support} contains helper code (not really specific to Zero Install)
@copyright: (C) 2010, Thomas Leonard
@see: U{http://0install.net}
@var _: a function for translating strings using the zero-install domain (for use internally by Zero Install)
"""
version = '0.52'
import gettext
from os.path import dirname, join
try:
localedir = None
translation = gettext.translation('zero-install', fallback = False)
except:
localedir = join(dirname(dirname(__file__)), 'share', 'locale')
translation = gettext.translation('zero-install',
localedir = localedir,
fallback = True)
_ = translation.ugettext
class SafeException(Exception):
"""An exception that can be reported to the user without a stack trace.
The command-line interface's C{--verbose} option will display the full stack trace."""
class NeedDownload(SafeException):
"""Thrown by L{injector.autopolicy.AutoPolicy} if we tried to start a download
and downloading is disabled."""
def __init__(self, url):
Exception.__init__(self, _("Would download '%s'") % url)
| lgpl-2.1 | -7,319,739,571,315,520,000 | 37.842105 | 131 | 0.752033 | false |
datalogics-robb/scons | test/site_scons/basic.py | 2 | 2127 | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import TestSCons
"""
Verify basic functionality of the site_scons dir and
site_scons/site_init.py file:
Make sure the site_scons/site_init.py file gets loaded,
and make sure a tool can be loaded from the site_scons/site_tools subdir,
even when executing out of a subdirectory.
"""
test = TestSCons.TestSCons()
test.subdir('site_scons', ['site_scons', 'site_tools'])
test.write(['site_scons', 'site_init.py'], """
from SCons.Script import *
print "Hi there, I am in site_scons/site_init.py!"
""")
test.write(['site_scons', 'site_tools', 'mytool.py'], """
import SCons.Tool
def generate(env):
env['MYTOOL']='mytool'
def exists(env):
return 1
""")
test.write('SConstruct', """
e=Environment(tools=['default', 'mytool'])
print e.subst('My site tool is $MYTOOL')
""")
test.run(arguments = '-Q .',
stdout = """Hi there, I am in site_scons/site_init.py!
My site tool is mytool
scons: `.' is up to date.\n""")
test.pass_test()
# end of file
| mit | 230,234,978,400,440,640 | 29.826087 | 73 | 0.719793 | false |
yasserglez/tagfs | packages/tagfs/contrib/django/db/__init__.py | 8 | 3420 | import os
from django.conf import settings
from django.core import signals
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import curry
from django.utils.importlib import import_module
__all__ = ('backend', 'connection', 'DatabaseError', 'IntegrityError')
if not settings.DATABASE_ENGINE:
settings.DATABASE_ENGINE = 'dummy'
def load_backend(backend_name):
try:
# Most of the time, the database backend will be one of the official
# backends that ships with Django, so look there first.
return import_module('.base', 'django.db.backends.%s' % backend_name)
except ImportError, e:
# If the import failed, we might be looking for a database backend
# distributed external to Django. So we'll try that next.
try:
return import_module('.base', backend_name)
except ImportError, e_user:
# The database backend wasn't found. Display a helpful error message
# listing all possible (built-in) database backends.
backend_dir = os.path.join(__path__[0], 'backends')
try:
available_backends = [f for f in os.listdir(backend_dir)
if os.path.isdir(os.path.join(backend_dir, f))
and not f.startswith('.')]
except EnvironmentError:
available_backends = []
available_backends.sort()
if backend_name not in available_backends:
error_msg = "%r isn't an available database backend. Available options are: %s\nError was: %s" % \
(backend_name, ", ".join(map(repr, available_backends)), e_user)
raise ImproperlyConfigured(error_msg)
else:
raise # If there's some other error, this must be an error in Django itself.
backend = load_backend(settings.DATABASE_ENGINE)
# `connection`, `DatabaseError` and `IntegrityError` are convenient aliases
# for backend bits.
# DatabaseWrapper.__init__() takes a dictionary, not a settings module, so
# we manually create the dictionary from the settings, passing only the
# settings that the database backends care about. Note that TIME_ZONE is used
# by the PostgreSQL backends.
connection = backend.DatabaseWrapper({
'DATABASE_HOST': settings.DATABASE_HOST,
'DATABASE_NAME': settings.DATABASE_NAME,
'DATABASE_OPTIONS': settings.DATABASE_OPTIONS,
'DATABASE_PASSWORD': settings.DATABASE_PASSWORD,
'DATABASE_PORT': settings.DATABASE_PORT,
'DATABASE_USER': settings.DATABASE_USER,
'TIME_ZONE': settings.TIME_ZONE,
})
DatabaseError = backend.DatabaseError
IntegrityError = backend.IntegrityError
# Register an event that closes the database connection
# when a Django request is finished.
def close_connection(**kwargs):
connection.close()
signals.request_finished.connect(close_connection)
# Register an event that resets connection.queries
# when a Django request is started.
def reset_queries(**kwargs):
connection.queries = []
signals.request_started.connect(reset_queries)
# Register an event that rolls back the connection
# when a Django request has an exception.
def _rollback_on_exception(**kwargs):
from django.db import transaction
try:
transaction.rollback_unless_managed()
except DatabaseError:
pass
signals.got_request_exception.connect(_rollback_on_exception)
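# Hedged usage sketch (not part of the original module): loading a backend
# module by name and building a wrapper around an explicit settings dict.
# The backend name and settings values are illustrative only.
def _demo_load_backend():
    sqlite = load_backend('sqlite3')
    return sqlite.DatabaseWrapper({
        'DATABASE_HOST': '', 'DATABASE_NAME': ':memory:',
        'DATABASE_OPTIONS': {}, 'DATABASE_PASSWORD': '',
        'DATABASE_PORT': '', 'DATABASE_USER': '', 'TIME_ZONE': None,
    })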
| mit | 3,173,112,652,308,821,500 | 40.707317 | 114 | 0.688012 | false |
arielalmendral/ert | python/python/ert_gui/ertwidgets/validationsupport.py | 3 | 3782 | from PyQt4.QtCore import Qt, QPoint, QObject, pyqtSignal
from PyQt4.QtGui import QWidget, QVBoxLayout, QSizePolicy, QFrame, QColor, QLabel
class ErrorPopup(QWidget):
error_template = ("<html>"
"<table style='background-color: #ffdfdf;'width='100%%'>"
"<tr><td style='font-weight: bold; padding-left: 5px;'>Warning:</td></tr>"
"%s"
"</table>"
"</html>")
def __init__(self):
QWidget.__init__(self, None, Qt.ToolTip)
self.resize(300, 50)
self.setContentsMargins(0, 0, 0, 0)
layout = QVBoxLayout()
layout.setMargin(0)
self._error_widget = QLabel("")
self._error_widget.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Minimum)
self._error_widget.setFrameStyle(QFrame.Box)
self._error_widget.setWordWrap(True)
self._error_widget.setScaledContents(True)
# self.warning_widget.setAlignment(Qt.AlignHCenter)
self._error_widget.setTextFormat(Qt.RichText)
layout.addWidget(self._error_widget)
self.setLayout(layout)
def presentError(self, widget, error):
assert isinstance(widget, QWidget)
self._error_widget.setText(ErrorPopup.error_template % error)
self.show()
size_hint = self.sizeHint()
rect = widget.rect()
p = widget.mapToGlobal(QPoint(rect.left(), rect.top()))
self.setGeometry(p.x(), p.y() - size_hint.height() - 5, size_hint.width(), size_hint.height())
self.raise_()
class ValidationSupport(QObject):
STRONG_ERROR_COLOR = QColor(255, 215, 215)
ERROR_COLOR = QColor(255, 235, 235)
INVALID_COLOR = QColor(235, 235, 255)
WARNING = "warning"
EXCLAMATION = "ide/small/exclamation"
validationChanged = pyqtSignal(bool)
def __init__(self, validation_target):
""" @type validation_target: QWidget """
QObject.__init__(self)
self._validation_target = validation_target
self._validation_message = None
self._validation_type = None
self._error_popup = ErrorPopup()
self._originalEnterEvent = validation_target.enterEvent
self._originalLeaveEvent = validation_target.leaveEvent
self._originalHideEvent = validation_target.hideEvent
def enterEvent(event):
self._originalEnterEvent(event)
if not self.isValid():
self._error_popup.presentError(self._validation_target, self._validation_message)
validation_target.enterEvent = enterEvent
def leaveEvent(event):
self._originalLeaveEvent(event)
if self._error_popup is not None:
self._error_popup.hide()
validation_target.leaveEvent = leaveEvent
def hideEvent(hide_event):
self._error_popup.hide()
self._originalHideEvent(hide_event)
validation_target.hideEvent = hideEvent
def setValidationMessage(self, message, validation_type=WARNING):
"""Add a warning or information icon to the widget with a tooltip"""
message = message.strip()
if message == "":
self._validation_type = None
self._validation_message = None
self._error_popup.hide()
self.validationChanged.emit(True)
else:
self._validation_type = validation_type
self._validation_message = message
if self._validation_target.hasFocus() or self._validation_target.underMouse():
self._error_popup.presentError(self._validation_target, self._validation_message)
self.validationChanged.emit(False)
def isValid(self):
return self._validation_message is None
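# Hedged usage sketch (not part of the original module): wiring
# ValidationSupport to a QLineEdit; widget names and the message text are
# illustrative only.
def _demo_validation_support():
    import sys
    from PyQt4.QtGui import QApplication, QLineEdit
    app = QApplication(sys.argv)
    line_edit = QLineEdit()
    support = ValidationSupport(line_edit)
    support.validationChanged.connect(
        lambda valid: line_edit.setStyleSheet(
            "" if valid else "background-color: #ffdfdf"))
    support.setValidationMessage("Value must not be empty")  # marks invalid
    line_edit.show()
    return app.exec_()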
| gpl-3.0 | 8,552,073,589,198,036,000 | 33.381818 | 102 | 0.617663 | false |
Arable/evepod | lib/python2.7/site-packages/newrelic-2.12.0.10/newrelic/api/memcache_trace.py | 2 | 2454 | import sys
import types
import time
import newrelic.core.memcache_node
import newrelic.api.transaction
import newrelic.api.time_trace
import newrelic.api.object_wrapper
class MemcacheTrace(newrelic.api.time_trace.TimeTrace):
node = newrelic.core.memcache_node.MemcacheNode
def __init__(self, transaction, command):
super(MemcacheTrace, self).__init__(transaction)
self.command = command
def dump(self, file):
print >> file, self.__class__.__name__, dict(command=self.command)
def create_node(self):
return self.node(command=self.command, children=self.children,
start_time=self.start_time, end_time=self.end_time,
duration=self.duration, exclusive=self.exclusive)
def terminal_node(self):
return True
class MemcacheTraceWrapper(object):
def __init__(self, wrapped, command):
if isinstance(wrapped, tuple):
(instance, wrapped) = wrapped
else:
instance = None
newrelic.api.object_wrapper.update_wrapper(self, wrapped)
self._nr_instance = instance
self._nr_next_object = wrapped
if not hasattr(self, '_nr_last_object'):
self._nr_last_object = wrapped
self._nr_command = command
def __get__(self, instance, klass):
if instance is None:
return self
descriptor = self._nr_next_object.__get__(instance, klass)
return self.__class__((instance, descriptor), self._nr_command)
def __call__(self, *args, **kwargs):
transaction = newrelic.api.transaction.current_transaction()
if not transaction:
return self._nr_next_object(*args, **kwargs)
if callable(self._nr_command):
if self._nr_instance is not None:
command = self._nr_command(self._nr_instance, *args,
**kwargs)
else:
command = self._nr_command(*args, **kwargs)
else:
command = self._nr_command
with MemcacheTrace(transaction, command):
return self._nr_next_object(*args, **kwargs)
def memcache_trace(command):
def decorator(wrapped):
return MemcacheTraceWrapper(wrapped, command)
return decorator
def wrap_memcache_trace(module, object_path, command):
newrelic.api.object_wrapper.wrap_object(module, object_path,
MemcacheTraceWrapper, (command,))
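# Hedged usage sketch (not part of the original module): the decorated function
# below is illustrative; ``command`` may also be a callable that derives the
# command name from the call arguments.
@memcache_trace('get')
def _cached_lookup(client, key):
    return client.get(key)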
| apache-2.0 | 7,972,447,473,650,405,000 | 30.063291 | 74 | 0.622249 | false |
cwisecarver/osf.io | osf/models/__init__.py | 1 | 2134 | from osf.models.metaschema import MetaSchema # noqa
from osf.models.base import Guid, BlackListGuid # noqa
from osf.models.user import OSFUser # noqa
from osf.models.contributor import Contributor, RecentlyAddedContributor # noqa
from osf.models.session import Session # noqa
from osf.models.institution import Institution # noqa
from osf.models.node import AbstractNode, Node, Collection # noqa
from osf.models.sanctions import Sanction, Embargo, Retraction, RegistrationApproval, DraftRegistrationApproval, EmbargoTerminationApproval # noqa
from osf.models.registrations import Registration, DraftRegistrationLog, DraftRegistration # noqa
from osf.models.nodelog import NodeLog # noqa
from osf.models.tag import Tag # noqa
from osf.models.comment import Comment # noqa
from osf.models.conference import Conference, MailRecord # noqa
from osf.models.citation import AlternativeCitation, CitationStyle # noqa
from osf.models.archive import ArchiveJob, ArchiveTarget # noqa
from osf.models.queued_mail import QueuedMail # noqa
from osf.models.external import ExternalAccount, ExternalProvider # noqa
from osf.models.oauth import ApiOAuth2Application, ApiOAuth2PersonalToken, ApiOAuth2Scope # noqa
from osf.models.licenses import NodeLicense, NodeLicenseRecord # noqa
from osf.models.private_link import PrivateLink # noqa
from osf.models.notifications import NotificationDigest, NotificationSubscription # noqa
from osf.models.spam import SpamStatus, SpamMixin # noqa
from osf.models.subject import Subject # noqa
from osf.models.preprint_provider import PreprintProvider # noqa
from osf.models.preprint_service import PreprintService # noqa
from osf.models.identifiers import Identifier # noqa
from osf.models.files import ( # noqa
BaseFileNode,
File, Folder, # noqa
FileVersion, TrashedFile, TrashedFileNode, TrashedFolder, # noqa
) # noqa
from osf.models.node_relation import NodeRelation # noqa
from osf.models.analytics import UserActivityCounter, PageCounter # noqa
from osf.models.admin_profile import AdminProfile # noqa
from osf.models.admin_log_entry import AdminLogEntry # noqa
| apache-2.0 | -5,058,507,975,873,217,000 | 59.971429 | 147 | 0.81209 | false |
lczub/TestLink-API-Python-client | example/Last7DaysTestCases.py | 1 | 2235 | #! /usr/bin/python
# -*- coding: UTF-8 -*-
# Copyright 2011-2019 Olivier Renault, James Stock, TestLink-API-Python-client developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------
# This example shows, how the API could be used to list all test cases,
# which have been created during the last 7 days
from __future__ import print_function
from testlink import TestlinkAPIClient, TestLinkHelper
import time
def iterTCasesfromTProject(api, TProjName, date1, date2):
""" returns as iterator all test cases of project TPROJTNAME, which are
created between DATE1 and DATE2
DATE1 and DATE2 must be of type time.struct_time """
TProjId = api.getTestProjectByName(TProjName)['id']
for TSinfo in api.getFirstLevelTestSuitesForTestProject(TProjId):
TSuiteId = TSinfo['id']
for TCid in api.getTestCasesForTestSuite(TSuiteId, deep=1,details='only_id'):
TCdata = api.getTestCase(TCid)[0] #really only one TC?
dateTC=time.strptime(TCdata['creation_ts'][:10], '%Y-%m-%d')
if (date1 <= dateTC) and (dateTC <= date2):
yield TCdata
if __name__ == '__main__':
tlapi = TestLinkHelper().connect(TestlinkAPIClient)
projName = 'NEW_PROJECT_API'
currentTime = time.localtime()
oldTime = time.localtime(time.time() - 3600 * 24 * 7)
print('%s test cases created between %s and %s' % \
(projName, time.strftime('%Y-%m-%d', oldTime),
time.strftime('%Y-%m-%d', currentTime)))
for TCdata in iterTCasesfromTProject(tlapi, projName, oldTime, currentTime):
print(' %(name)s %(version)s %(creation_ts)s' % TCdata)
| apache-2.0 | 583,941,020,991,559,800 | 41.169811 | 90 | 0.66085 | false |
iseppi/zookeepr | zk/model/proposal.py | 4 | 11472 | """The application's model objects"""
import sqlalchemy as sa
from meta import Base
from pylons.controllers.util import abort
from meta import Session
from person import Person
from person_proposal_map import person_proposal_map
from attachment import Attachment
from review import Review
from stream import Stream
class ProposalStatus(Base):
"""Stores both account login details and personal information.
"""
__tablename__ = 'proposal_status'
id = sa.Column(sa.types.Integer, primary_key=True)
# title of proposal
name = sa.Column(sa.types.String(40), unique=True, nullable=False)
def __init__(self, **kwargs):
# remove the args that should never be set via creation
super(ProposalStatus, self).__init__(**kwargs)
@classmethod
def find_by_id(cls, id):
return Session.query(ProposalStatus).filter_by(id=id).first()
@classmethod
def find_by_name(cls, name):
return Session.query(ProposalStatus).filter_by(name=name).first()
@classmethod
def find_all(cls):
return Session.query(ProposalStatus).order_by(ProposalStatus.name).all()
class ProposalType(Base):
"""Stores both account login details and personal information.
"""
__tablename__ = 'proposal_type'
id = sa.Column(sa.types.Integer, primary_key=True)
# title of proposal
name = sa.Column(sa.types.String(40), unique=True, nullable=False)
notify_email = sa.Column(sa.types.Text, nullable=True)
def __init__(self, **kwargs):
# remove the args that should never be set via creation
super(ProposalType, self).__init__(**kwargs)
@classmethod
def find_by_id(cls, id):
return Session.query(ProposalType).filter_by(id=id).first()
@classmethod
def find_by_name(cls, name):
return Session.query(ProposalType).filter_by(name=name).first()
@classmethod
def find_all(cls):
return Session.query(ProposalType).order_by(ProposalType.name).all()
class TravelAssistanceType(Base):
__tablename__ = 'travel_assistance_type'
id = sa.Column(sa.types.Integer, primary_key=True)
name = sa.Column(sa.types.String(60), unique=True, nullable=False)
def __init__(self, **kwargs):
# remove the args that should never be set via creation
super(TravelAssistanceType, self).__init__(**kwargs)
@classmethod
def find_by_id(cls, id):
return Session.query(TravelAssistanceType).filter_by(id=id).first()
@classmethod
def find_by_name(cls, name):
return Session.query(TravelAssistanceType).filter_by(name=name).first()
@classmethod
def find_all(cls):
return Session.query(TravelAssistanceType).order_by(TravelAssistanceType.name).all()
class TargetAudience(Base):
__tablename__ = 'target_audience'
id = sa.Column(sa.types.Integer, primary_key=True)
name = sa.Column(sa.types.String(40), unique=True, nullable=False)
def __init__(self, **kwargs):
# remove the args that should never be set via creation
super(TargetAudience, self).__init__(**kwargs)
@classmethod
def find_by_id(cls, id):
return Session.query(TargetAudience).filter_by(id=id).first()
@classmethod
def find_by_name(cls, name):
return Session.query(TargetAudience).filter_by(name=name).first()
@classmethod
def find_all(cls):
return Session.query(TargetAudience).order_by(TargetAudience.name).all()
class AccommodationAssistanceType(Base):
__tablename__ = 'accommodation_assistance_type'
id = sa.Column(sa.types.Integer, primary_key=True)
# title of proposal
name = sa.Column(sa.types.String(120), unique=True, nullable=False)
def __init__(self, **kwargs):
# remove the args that should never be set via creation
super(AccommodationAssistanceType, self).__init__(**kwargs)
@classmethod
def find_by_id(cls, id):
return Session.query(AccommodationAssistanceType).filter_by(id=id).first()
@classmethod
def find_by_name(cls, name):
return Session.query(AccommodationAssistanceType).filter_by(name=name).first()
@classmethod
def find_all(cls):
return Session.query(AccommodationAssistanceType).order_by(AccommodationAssistanceType.name).all()
class Proposal(Base):
"""Stores both account login details and personal information.
"""
__tablename__ = 'proposal'
id = sa.Column(sa.types.Integer, primary_key=True)
# title of proposal
title = sa.Column(sa.types.Text, nullable=False)
# abstract or description
abstract = sa.Column(sa.types.Text, nullable=False)
private_abstract = sa.Column(sa.types.Text, nullable=False)
technical_requirements = sa.Column(sa.types.Text, nullable=False)
# type, enumerated in the proposal_type table
proposal_type_id = sa.Column(sa.types.Integer, sa.ForeignKey('proposal_type.id'), nullable=False)
# allocated stream of talk
stream_id = sa.Column(sa.types.Integer, sa.ForeignKey('stream.id'))
# type, enumerated in the assistance_type table
travel_assistance_type_id = sa.Column(sa.types.Integer, sa.ForeignKey('travel_assistance_type.id'), nullable=False)
accommodation_assistance_type_id = sa.Column(sa.types.Integer, sa.ForeignKey('accommodation_assistance_type.id'), nullable=False)
status_id = sa.Column(sa.types.Integer, sa.ForeignKey('proposal_status.id'), nullable=False)
target_audience_id = sa.Column(sa.types.Integer, sa.ForeignKey('target_audience.id'), nullable=False)
video_release = sa.Column(sa.types.Boolean, nullable=False)
slides_release = sa.Column(sa.types.Boolean, nullable=False)
# name and url of the project
project = sa.Column(sa.types.Text, nullable=False)
url = sa.Column(sa.types.Text, nullable=True)
# url to a short video
abstract_video_url = sa.Column(sa.types.Text, nullable=True)
creation_timestamp = sa.Column(sa.types.DateTime, nullable=False, default=sa.func.current_timestamp())
last_modification_timestamp = sa.Column(sa.types.DateTime, nullable=False, default=sa.func.current_timestamp(), onupdate=sa.func.current_timestamp())
# relations
type = sa.orm.relation(ProposalType, backref='proposals')
stream = sa.orm.relation(Stream)
accommodation_assistance = sa.orm.relation(AccommodationAssistanceType)
travel_assistance = sa.orm.relation(TravelAssistanceType)
status = sa.orm.relation(ProposalStatus, backref='proposals')
audience = sa.orm.relation(TargetAudience)
people = sa.orm.relation(Person, secondary=person_proposal_map, backref='proposals')
attachments = sa.orm.relation(Attachment, cascade='all, delete-orphan')
reviews = sa.orm.relation(Review, backref='proposal', cascade='all, delete-orphan')
def __init__(self, **kwargs):
# remove the args that should never be set via creation
super(Proposal, self).__init__(**kwargs)
stream_id = None
def __repr__(self):
return '<Proposal id="%r" title="%s">' % (self.id, self.title)
def _get_accepted(self):
return self.status.name == 'Accepted'
accepted = property(_get_accepted)
def _get_offered(self):
return 'Offered' in self.status.name
offered = property(_get_offered)
def _get_withdrawn(self):
return self.status.name == 'Withdrawn'
withdrawn = property(_get_withdrawn)
def _get_declined(self):
return self.status.name == 'Declined'
declined = property(_get_declined)
def _get_proposer_status(self):
if self.accepted or self.withdrawn or self.declined or self.offered:
return self.status.name
else:
return "Under Review"
proposer_status = property(_get_proposer_status)
@classmethod
def find_by_id(cls, id, abort_404 = True):
result = Session.query(Proposal).filter_by(id=id).first()
if result is None and abort_404:
abort(404, "No such proposal object")
return result
@classmethod
def find_by_title(cls, title):
return Session.query(Proposal).filter_by(title=title).order_by(Proposal.title).all()
@classmethod
def find_all(cls):
return Session.query(Proposal).order_by(Proposal.id).all()
@classmethod
def find_all_by_accommodation_assistance_type_id(cls, id, abort_404 = True):
result = Session.query(Proposal).filter_by(accommodation_assistance_type_id=id).all()
if result is None and abort_404:
abort(404, "No such proposal object")
return result
@classmethod
def find_all_by_travel_assistance_type_id(cls, id, abort_404 = True):
result = Session.query(Proposal).filter_by(travel_assistance_type_id=id).all()
if result is None and abort_404:
abort(404, "No such proposal object")
return result
# TODO: add an optional filter for removing the signed in user's proposals
@classmethod
def find_all_by_proposal_type_id(cls, id, abort_404 = True, include_withdrawn=True):
result = Session.query(Proposal).filter_by(proposal_type_id=id)
if not include_withdrawn:
withdrawn = ProposalStatus.find_by_name('Withdrawn')
result = result.filter(Proposal.status_id != withdrawn.id)
result = result.all()
if result is None and abort_404:
abort(404, "No such proposal object")
return result
@classmethod
def find_all_accepted(cls):
return Session.query(Proposal).filter(ProposalStatus.name=='Accepted')
@classmethod
def find_all_accepted_without_event(cls):
status = ProposalStatus.find_by_name('Accepted')
return Session.query(Proposal).filter_by(status=status).filter_by(event=None).all()
@classmethod
def find_accepted_by_id(cls, id):
#status = ProposalStatus.find_by_name('Accepted')
#result = Session.query(Proposal).filter_by(id=id,status_id=status.id)
# Optimisation: assume that ProposalStatus of ID=1 is Accepted
result = Session.query(Proposal).filter_by(id=id,status_id=1).one()
return result
# TODO: add an optional filter for removing the signed in user's proposals
@classmethod
def find_next_proposal(cls, id, type_id, signed_in_person_id):
withdrawn = ProposalStatus.find_by_name('Withdrawn')
next = Session.query(Proposal).from_statement("""
SELECT
p.id
FROM
(SELECT id
FROM proposal
WHERE id <> %d
AND status_id <> %d
AND proposal_type_id = %d
EXCEPT
SELECT proposal_id AS id
FROM review
WHERE review.reviewer_id = %d) AS p
LEFT JOIN
review AS r
ON(p.id=r.proposal_id)
GROUP BY
p.id
ORDER BY COUNT(r.reviewer_id), RANDOM()
LIMIT 1
""" % (id, withdrawn.id, type_id, signed_in_person_id))
next = next.first()
if next is not None:
return next.id
else:
# looks like you've reviewed everything!
return None
@classmethod
def find_review_summary(cls):
from review import Review
return Review.stats_query().join(cls).add_entity(cls).group_by(cls)
| gpl-2.0 | -2,526,908,764,949,024,000 | 35.303797 | 153 | 0.65734 | false |
vmax-feihu/hue | desktop/core/ext-py/lxml-3.3.6/src/lxml/sax.py | 24 | 8493 | """
SAX-based adapter to copy trees from/to the Python standard library.
Use the `ElementTreeContentHandler` class to build an ElementTree from
SAX events.
Use the `ElementTreeProducer` class or the `saxify()` function to fire
the SAX events of an ElementTree against a SAX ContentHandler.
See http://codespeak.net/lxml/sax.html
"""
from xml.sax.handler import ContentHandler
from lxml import etree
from lxml.etree import ElementTree, SubElement
from lxml.etree import Comment, ProcessingInstruction
class SaxError(etree.LxmlError):
"""General SAX error.
"""
pass
def _getNsTag(tag):
if tag[0] == '{':
return tuple(tag[1:].split('}', 1))
else:
return (None, tag)
class ElementTreeContentHandler(ContentHandler):
"""Build an lxml ElementTree from SAX events.
"""
def __init__(self, makeelement=None):
self._root = None
self._root_siblings = []
self._element_stack = []
self._default_ns = None
self._ns_mapping = { None : [None] }
self._new_mappings = {}
if makeelement is None:
makeelement = etree.Element
self._makeelement = makeelement
def _get_etree(self):
"Contains the generated ElementTree after parsing is finished."
return ElementTree(self._root)
etree = property(_get_etree, doc=_get_etree.__doc__)
def setDocumentLocator(self, locator):
pass
def startDocument(self):
pass
def endDocument(self):
pass
def startPrefixMapping(self, prefix, uri):
self._new_mappings[prefix] = uri
try:
self._ns_mapping[prefix].append(uri)
except KeyError:
self._ns_mapping[prefix] = [uri]
if prefix is None:
self._default_ns = uri
def endPrefixMapping(self, prefix):
ns_uri_list = self._ns_mapping[prefix]
ns_uri_list.pop()
if prefix is None:
self._default_ns = ns_uri_list[-1]
def _buildTag(self, ns_name_tuple):
ns_uri, local_name = ns_name_tuple
if ns_uri:
el_tag = "{%s}%s" % ns_name_tuple
elif self._default_ns:
el_tag = "{%s}%s" % (self._default_ns, local_name)
else:
el_tag = local_name
return el_tag
def startElementNS(self, ns_name, qname, attributes=None):
el_name = self._buildTag(ns_name)
if attributes:
attrs = {}
try:
iter_attributes = attributes.iteritems()
except AttributeError:
iter_attributes = attributes.items()
for name_tuple, value in iter_attributes:
if name_tuple[0]:
attr_name = "{%s}%s" % name_tuple
else:
attr_name = name_tuple[1]
attrs[attr_name] = value
else:
attrs = None
element_stack = self._element_stack
if self._root is None:
element = self._root = \
self._makeelement(el_name, attrs, self._new_mappings)
if self._root_siblings and hasattr(element, 'addprevious'):
for sibling in self._root_siblings:
element.addprevious(sibling)
del self._root_siblings[:]
else:
element = SubElement(element_stack[-1], el_name,
attrs, self._new_mappings)
element_stack.append(element)
self._new_mappings.clear()
def processingInstruction(self, target, data):
pi = ProcessingInstruction(target, data)
if self._root is None:
self._root_siblings.append(pi)
else:
self._element_stack[-1].append(pi)
def endElementNS(self, ns_name, qname):
element = self._element_stack.pop()
el_tag = self._buildTag(ns_name)
if el_tag != element.tag:
raise SaxError("Unexpected element closed: " + el_tag)
def startElement(self, name, attributes=None):
if attributes:
attributes = dict(
[((None, k), v) for k, v in attributes.items()]
)
self.startElementNS((None, name), name, attributes)
def endElement(self, name):
self.endElementNS((None, name), name)
def characters(self, data):
last_element = self._element_stack[-1]
try:
# if there already is a child element, we must append to its tail
last_element = last_element[-1]
last_element.tail = (last_element.tail or '') + data
except IndexError:
# otherwise: append to the text
last_element.text = (last_element.text or '') + data
ignorableWhitespace = characters
class ElementTreeProducer(object):
"""Produces SAX events for an element and children.
"""
def __init__(self, element_or_tree, content_handler):
try:
element = element_or_tree.getroot()
except AttributeError:
element = element_or_tree
self._element = element
self._content_handler = content_handler
from xml.sax.xmlreader import AttributesNSImpl as attr_class
self._attr_class = attr_class
self._empty_attributes = attr_class({}, {})
def saxify(self):
self._content_handler.startDocument()
element = self._element
if hasattr(element, 'getprevious'):
siblings = []
sibling = element.getprevious()
while getattr(sibling, 'tag', None) is ProcessingInstruction:
siblings.append(sibling)
sibling = sibling.getprevious()
for sibling in siblings[::-1]:
self._recursive_saxify(sibling, {})
self._recursive_saxify(element, {})
if hasattr(element, 'getnext'):
sibling = element.getnext()
while getattr(sibling, 'tag', None) is ProcessingInstruction:
self._recursive_saxify(sibling, {})
sibling = sibling.getnext()
self._content_handler.endDocument()
def _recursive_saxify(self, element, prefixes):
content_handler = self._content_handler
tag = element.tag
if tag is Comment or tag is ProcessingInstruction:
if tag is ProcessingInstruction:
content_handler.processingInstruction(
element.target, element.text)
if element.tail:
content_handler.characters(element.tail)
return
new_prefixes = []
build_qname = self._build_qname
attribs = element.items()
if attribs:
attr_values = {}
attr_qnames = {}
for attr_ns_name, value in attribs:
attr_ns_tuple = _getNsTag(attr_ns_name)
attr_values[attr_ns_tuple] = value
attr_qnames[attr_ns_tuple] = build_qname(
attr_ns_tuple[0], attr_ns_tuple[1], prefixes, new_prefixes)
sax_attributes = self._attr_class(attr_values, attr_qnames)
else:
sax_attributes = self._empty_attributes
ns_uri, local_name = _getNsTag(tag)
qname = build_qname(ns_uri, local_name, prefixes, new_prefixes)
for prefix, uri in new_prefixes:
content_handler.startPrefixMapping(prefix, uri)
content_handler.startElementNS((ns_uri, local_name),
qname, sax_attributes)
if element.text:
content_handler.characters(element.text)
for child in element:
self._recursive_saxify(child, prefixes)
content_handler.endElementNS((ns_uri, local_name), qname)
for prefix, uri in new_prefixes:
content_handler.endPrefixMapping(prefix)
if element.tail:
content_handler.characters(element.tail)
def _build_qname(self, ns_uri, local_name, prefixes, new_prefixes):
if ns_uri is None:
return local_name
try:
prefix = prefixes[ns_uri]
except KeyError:
prefix = prefixes[ns_uri] = 'ns%02d' % len(prefixes)
new_prefixes.append( (prefix, ns_uri) )
return prefix + ':' + local_name
def saxify(element_or_tree, content_handler):
"""One-shot helper to generate SAX events from an XML tree and fire
them against a SAX ContentHandler.
"""
return ElementTreeProducer(element_or_tree, content_handler).saxify()
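# Hedged usage sketch (not part of the original module): round-tripping a small
# tree through SAX events; the XML content is illustrative only.
def _demo_roundtrip():
    source = etree.fromstring('<root xmlns="http://ns"><a>text</a></root>')
    handler = ElementTreeContentHandler()
    saxify(source, handler)
    return handler.etree  # an ElementTree equivalent to ``source``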
| apache-2.0 | -4,531,381,459,776,535,600 | 33.384615 | 79 | 0.580949 | false |
agiliq/nginx-python-buildpack | vendor/setuptools-2.1/tests/test_ez_setup.py | 10 | 1847 | import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
from ez_setup import (use_setuptools, _build_egg, _python_cmd, _do_download,
_install, DEFAULT_URL, DEFAULT_VERSION)
import ez_setup
class TestSetup(unittest.TestCase):
def urlopen(self, url):
return open(self.tarball, 'rb')
def setUp(self):
self.old_sys_path = copy.copy(sys.path)
self.cwd = os.getcwd()
self.tmpdir = tempfile.mkdtemp()
os.chdir(TOPDIR)
_python_cmd("setup.py", "-q", "egg_info", "-RDb", "''", "sdist",
"--dist-dir", "%s" % self.tmpdir)
tarball = os.listdir(self.tmpdir)[0]
self.tarball = os.path.join(self.tmpdir, tarball)
from setuptools.compat import urllib2
urllib2.urlopen = self.urlopen
def tearDown(self):
shutil.rmtree(self.tmpdir)
os.chdir(self.cwd)
sys.path = copy.copy(self.old_sys_path)
def test_build_egg(self):
# making it an egg
egg = _build_egg('Egg to be built', self.tarball, self.tmpdir)
# now trying to import it
sys.path[0] = egg
import setuptools
self.assertTrue(setuptools.__file__.startswith(egg))
def test_do_download(self):
tmpdir = tempfile.mkdtemp()
_do_download(DEFAULT_VERSION, DEFAULT_URL, tmpdir, 1)
import setuptools
self.assertTrue(setuptools.bootstrap_install_from.startswith(tmpdir))
def test_install(self):
def _faked(*args):
return True
ez_setup.python_cmd = _faked
_install(self.tarball)
def test_use_setuptools(self):
self.assertEqual(use_setuptools(), None)
if __name__ == '__main__':
unittest.main()
| mit | 1,931,628,004,561,361,700 | 28.31746 | 77 | 0.619383 | false |
gablg1/PerfKitBenchmarker | perfkitbenchmarker/packages/openmpi.py | 4 | 1879 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing OpenMPI installation and cleanup functions."""
from perfkitbenchmarker import vm_util
MPI_DIR = '%s/openmpi-1.6.5' % vm_util.VM_TMP_DIR
MPI_TAR = 'openmpi-1.6.5.tar.gz'
MPI_URL = 'http://www.open-mpi.org/software/ompi/v1.6/downloads/' + MPI_TAR
def _Install(vm):
"""Installs the OpenMPI package on the VM."""
vm.Install('build_tools')
vm.Install('wget')
vm.RemoteCommand('wget %s -P %s' % (MPI_URL, vm_util.VM_TMP_DIR))
vm.RemoteCommand('cd %s && tar xvfz %s' % (vm_util.VM_TMP_DIR, MPI_TAR))
make_jobs = vm.num_cpus
config_cmd = ('./configure --enable-static --disable-shared --disable-dlopen '
'--prefix=/usr')
vm.RemoteCommand('cd %s && %s && make -j %s && sudo make install' %
(MPI_DIR, config_cmd, make_jobs))
def YumInstall(vm):
"""Installs the OpenMPI package on the VM."""
_Install(vm)
def AptInstall(vm):
"""Installs the OpenMPI package on the VM."""
_Install(vm)
def _Uninstall(vm):
"""Uninstalls the OpenMPI package on the VM."""
vm.RemoteCommand('cd {0} && sudo make uninstall'.format(MPI_DIR))
def YumUninstall(vm):
"""Uninstalls the OpenMPI package on the VM."""
_Uninstall(vm)
def AptUninstall(vm):
"""Uninstalls the OpenMPI package on the VM."""
_Uninstall(vm)
| apache-2.0 | -4,559,855,008,036,458,500 | 30.316667 | 80 | 0.685471 | false |
upconsulting/IsisCB | isiscb/zotero/tasks.py | 1 | 10839 | """
These functions are mostly related to transitioning data from the Zotero app
to the IsisData app.
TODO: many of these functions could use refactoring, or at least modularizing
for easier testing.
"""
from __future__ import unicode_literals
from django.db.models import Q
from isisdata.models import *
from zotero.models import *
import iso8601
def _record_history_message(request, accession):
template = u'Created from Zotero accession {0}, performed at {1} by {2}.' \
+ u' Subsequently ingested by {3}.'
values = (
accession.id,
accession.imported_on,
accession.imported_by,
request.user.username
)
return template.format(*values)
def ingest_accession(request, accession):
"""
Move all constituents of an :class:`.ImportAccession` into the production
database.
"""
ingested = [] # These will be production Citation instances.
ingested_draft_ids = [] # These will be DraftCitation ids.
for draftcitation in accession.citations_ok:
ingested.append(ingest_citation(request, accession, draftcitation))
ingested_draft_ids.append(draftcitation.id)
ingest_ccrelations(request, accession, ingested_draft_ids)
if accession.citations_remaining.count() == 0:
accession.processed = True
accession.save()
return ingested
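# Hedged sketch (not from the original source): how ingest_accession() is
# typically driven. The accession id and curator request are hypothetical;
# real calls run inside a Django view with the curation models loaded.
#
#   accession = ImportAccession.objects.get(pk=accession_id)
#   citations = ingest_accession(request, accession)
#   # each element of `citations` is the production Citation created (or
#   # reused) for one DraftCitation; CCRelations among them are ingested too.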
def ingest_citation(request, accession, draftcitation):
# If the citation is already resolved, there is nothing to do here: we
# simply return the target of the resolution.
if draftcitation.resolutions.count() > 0:
return draftcitation.resolutions.first().to_instance
citation_fields = [
('title', 'title'),
('description', 'description'),
('abstract', 'abstract'),
('type_controlled', 'type_controlled'),
('book_series', 'book_series'),
('physical_details', 'physical_details')
]
partdetails_fields = [
('page_start', 'page_begin'),
('page_end', 'page_end'),
('pages_free_text', 'pages_free_text'),
('issue', 'issue_free_text'),
('volume', 'volume_free_text'),
('extent', 'extent'),
('extent_note', 'extent_note'),
]
int_only_fields = dict([
('page_start', 'pages_free_text'),
('page_begin', 'pages_free_text'),
('page_end', 'pages_free_text'),
('extent', 'extent_note'),
])
# Gather fields that will be transferred to the production Citation.
citation_data = {}
for field, pfield in citation_fields:
value = getattr(draftcitation, field, None)
if value:
if field in int_only_fields:
try:
value = int(value)
except ValueError: # Not an int!
citation_data[int_only_fields[pfield]] = value
continue
citation_data[pfield] = value
# Records are inactive/non-public by default. The record_history message
# provides information about the Zotero accession.
citation_data.update({
'_history_user': request.user,
'public': False,
'record_status_value': CuratedMixin.INACTIVE,
'record_status_explanation': u'Inactive by default',
'record_history': _record_history_message(request, accession),
'belongs_to': accession.ingest_to,
'zotero_accession': accession,
})
# Troll for data for PartDetails fields.
partdetails_data = {}
for field, pfield in partdetails_fields:
value = getattr(draftcitation, field)
if value:
if field in int_only_fields:
try:
value = int(value)
except ValueError: # Not an int!
partdetails_data[int_only_fields[pfield]] = value
continue
partdetails_data[pfield] = value
if partdetails_data:
partdetails = PartDetails.objects.create(**partdetails_data)
citation_data.update({'part_details': partdetails})
citation = Citation.objects.create(**citation_data)
InstanceResolutionEvent.objects.create(for_instance=draftcitation, to_instance=citation)
# ISISCB-749 Language should be preserved from Zotero records.
if draftcitation.language:
citation.language.add(draftcitation.language)
date = None
if draftcitation.publication_date:
if type(draftcitation.publication_date) in [str, str]:
try:
date = iso8601.parse_date(draftcitation.publication_date).date()
except iso8601.ParseError:
match = re.search('([0-9]{4})', draftcitation.publication_date)
if match:
date = match.groups()[0]
else:
date = None
elif type(draftcitation.publication_date) is datetime.datetime:
date = draftcitation.publication_date.date()
if date:
if type(date) in [str, str]:
date = iso8601.parse_date(date).date()
citation.publication_date = date
pubdatetype, _ = AttributeType.objects.get_or_create(
name='PublicationDate',
defaults={
'value_content_type': ContentType.objects.get_for_model(ISODateValue)
})
if type(date) in [datetime.datetime, datetime.date]:
value_freeform = date.year
elif type(date) in [str, str]:
value_freeform = date[:4]
attribute = Attribute.objects.create(
type_controlled=pubdatetype,
source=citation,
value_freeform=value_freeform
)
vvalue = ISODateValue.objects.create(
value=date,
attribute=attribute,
)
elif draftcitation.publication_date:
# If we cannot parse the publication date as an ISO8601 date, then we
# update the staff notes with the unparseable date so that it is not
# completely lost.
message= u'\nCould not parse publication date in Zotero metadata: %s'\
% draftcitation.publication_date
if citation.administrator_notes:
citation.administrator_notes += message
else:
citation.administrator_notes = message
citation.save()
for relation in draftcitation.authority_relations.all():
draft = relation.authority
try:
target = draft.resolutions.first().to_instance
except AttributeError: # No resolution target. We create a "headless"
target = None # ACRelation.
citation.record_history += u"\n\nThe attempt to match the name %s in the %s field was skipped." % (draft.name, relation.get_type_controlled_display())
citation.save()
if target:
target.zotero_accession = accession
target.save()
# Transfer any linkeddata from the DraftAuthority to the production
# Authority.
for draftlinkeddata in draft.linkeddata.all():
ldtype, _ = LinkedDataType.objects.get_or_create(name=draftlinkeddata.name.upper())
if not target.linkeddata_entries.filter(type_controlled=ldtype, universal_resource_name=draftlinkeddata.value):
LinkedData.objects.create(
subject = target,
universal_resource_name = draftlinkeddata.value,
type_controlled = ldtype
)
draft.linkeddata.all().update(processed=True)
# ISISCB-577 Created ACRelation records should be active by default.
acr_data = {
'_history_user': request.user,
'name_for_display_in_citation': draft.name,
'record_history': _record_history_message(request, accession),
'public': True,
'record_status_value': CuratedMixin.ACTIVE,
'record_status_explanation': u'Active by default',
'authority': target,
'citation': citation,
'type_controlled': relation.type_controlled,
'belongs_to': accession.ingest_to,
'zotero_accession': accession,
'data_display_order': relation.data_display_order,
}
acrelation = ACRelation.objects.create(**acr_data)
InstanceResolutionEvent.objects.create(
for_instance = relation,
to_instance = acrelation,
)
ld_created = set([])
for linkeddata in citation.linkeddata_entries.all():
ld_created.add(linkeddata.universal_resource_name)
for draftlinkeddata in draftcitation.linkeddata.all():
_key = draftlinkeddata.value
if _key in ld_created:
continue
ld_created.add(_key)
ldtype, _ = LinkedDataType.objects.get_or_create(name=draftlinkeddata.name.upper())
LinkedData.objects.create(
subject = citation,
universal_resource_name = draftlinkeddata.value,
type_controlled = ldtype
)
draftcitation.linkeddata.all().update(processed=True)
draftcitation.authority_relations.all().update(processed=True)
draftcitation.processed = True
draftcitation.save()
accession.save()
return citation
def ingest_ccrelations(request, accession, ingested):
"""
Ingest :class:`.DraftCCRelation` instances among "ready"
:class:`.DraftCitation`\s.
Parameters
----------
request
accession : :class:`.ImportAccession`
ingested : list
List of :class:`.DraftCitation` ids.
Returns
-------
None
"""
# Both source and target must be ingested, and no other resolution for this
# DraftCCRelation may exist.
query = Q(subject_id__in=ingested) & Q(object_id__in=ingested) & Q(resolutions=None)
for relation in accession.draftccrelation_set.filter(query):
draft_source = relation.subject
source = ingest_citation(request, accession, draft_source) # Get.
draft_target = relation.object
target = ingest_citation(request, accession, draft_target)
ccr_data = {
'_history_user': request.user,
'public': True,
'record_history': _record_history_message(request, accession),
'record_status_value': CuratedMixin.ACTIVE,
'record_status_explanation': u'Active by default',
'subject': source,
'object': target,
'type_controlled': relation.type_controlled,
'belongs_to': accession.ingest_to,
'zotero_accession': accession,
}
ccrelation = CCRelation.objects.create(**ccr_data)
InstanceResolutionEvent.objects.create(
for_instance = relation,
to_instance = ccrelation,
)
| mit | -1,027,604,526,228,755,100 | 35.867347 | 162 | 0.609927 | false |
willingc/oh-mainline | mysite/customs/migrations/0005_roundupbugtracker_csv_keyword.py | 17 | 4097 | # This file is part of OpenHatch.
# Copyright (C) 2009 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from south.db import db
from django.db import models
from mysite.customs.models import *
class Migration:
def forwards(self, orm):
# Adding field 'RoundupBugTracker.keyword'
db.add_column('customs_roundupbugtracker', 'keyword', orm['customs.roundupbugtracker:keyword'])
# Changing field 'RoundupBugTracker.include_these_roundup_bug_statuses'
# (to signature: django.db.models.fields.CharField(default='-1,1,2,3,4,5,6', max_length=255))
db.alter_column('customs_roundupbugtracker', 'include_these_roundup_bug_statuses', orm['customs.roundupbugtracker:include_these_roundup_bug_statuses'])
def backwards(self, orm):
# Deleting field 'RoundupBugTracker.keyword'
db.delete_column('customs_roundupbugtracker', 'keyword')
# Changing field 'RoundupBugTracker.include_these_roundup_bug_statuses'
# (to signature: django.db.models.fields.CharField(default='1', max_length=255))
db.alter_column('customs_roundupbugtracker', 'include_these_roundup_bug_statuses', orm['customs.roundupbugtracker:include_these_roundup_bug_statuses'])
models = {
'customs.roundupbugtracker': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'include_these_roundup_bug_statuses': ('django.db.models.fields.CharField', [], {'default': "'-1,1,2,3,4,5,6'", 'max_length': '255'}),
'keyword': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '50', 'null': 'True'}),
'my_bugs_are_always_good_for_newcomers': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}),
'roundup_root_url': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'customs.webresponse': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'response_headers': ('django.db.models.fields.TextField', [], {}),
'status': ('django.db.models.fields.IntegerField', [], {}),
'text': ('django.db.models.fields.TextField', [], {}),
'url': ('django.db.models.fields.TextField', [], {})
},
'search.project': {
'date_icon_was_fetched_from_ohloh': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'icon': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_for_search_result': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_smaller_for_badge': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
}
}
complete_apps = ['customs']
| agpl-3.0 | 2,410,658,205,368,940,500 | 54.364865 | 159 | 0.622163 | false |
dbarella/steganographic-image-processing | encoding.py | 1 | 3350 | """Script for encoding a payload into an image."""
import argparse
import pathlib
from PIL import Image, ImageMath
import utilities
def argument_parser() -> argparse.ArgumentParser:
"""Returns a configured argparser.ArgumentParser for this program."""
parser = argparse.ArgumentParser(
description='Encode SECRETS into a picture',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
'host_image',
type=pathlib.Path,
help='The image that will hide the information.')
parser.add_argument(
'payload_image',
type=pathlib.Path,
help='The image that will be hidden within the host image.')
parser.add_argument(
'--significant_digits',
type=int,
default=1,
help='The number of least significant digits available to encode over.')
parser.add_argument(
'--display',
action='store_true',
default=False,
help='Display the encoded image generated by this program.')
parser.add_argument(
'--save',
action='store_true',
help='Save the encoded image generated by this program.')
parser.add_argument(
'--output_dir',
type=pathlib.Path,
default='.',
help=(
'A specific location to which the processed image will be saved. '
'If not specified, the current working directory will be used.'))
return parser
def encode(
host: Image.Image,
payload: Image.Image,
n_significant_digits: int
) -> Image.Image:
"""Encode a payload into an image (using the last n_significant_digits)."""
output_rgb_channels = []
for host_channel, payload_channel in zip(host.split(), payload.split()):
mask = utilities.bit_mask(n_significant_digits)
expression = (
"convert("
"(host & ({rgb_range} - {mask})) "
"| (payload & {mask}), 'L')".format(
rgb_range=utilities.RGB_RANGE, mask=mask))
output_rgb_channels.append(
ImageMath.eval(
expression,
host=host_channel,
payload=payload_channel))
return Image.merge(''.join(host.getbands()), output_rgb_channels)
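# Hedged illustration (not part of the original script): a rough pure-Python
# analogue of the per-channel masking that encode() expresses with ImageMath
# above. It assumes utilities.bit_mask(n) is the usual low-bit mask
# ((1 << n) - 1) and that channel values span 0..0xFF.
def _combine_bytes_example(host_byte: int, payload_byte: int, n_significant_digits: int = 1) -> int:
    mask = (1 << n_significant_digits) - 1       # e.g. 0b00000001 for one digit
    cleared = host_byte & (0xFF - mask)          # drop the host's low bits
    return cleared | (payload_byte & mask)       # carry over the payload's low bits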
def save(
encoded: Image.Image,
filename_seed: pathlib.Path,
output_dir: pathlib.Path
) -> None:
"""Saves the encoded image to disk to the specified output_dir."""
# Example: foo.png => foo.encoded.png
filename = (
filename_seed.with_suffix(
f'.encoded{filename_seed.suffix}')
.name)
encoded.save(output_dir.joinpath(filename), quality=100)
def main():
args = argument_parser().parse_args()
host = Image.open(args.host_image)
payload = Image.open(args.payload_image)
encoded = encode(host, payload, args.significant_digits)
# Display the encoded image
if args.display:
encoded.show()
# Save the encoded image, if the user wants us to
if args.save:
user_response = utilities.query_user(
'GONNA SAVE ENCODED IMAGE to "{0:s}"; GAR, IS THAT K???'.format(
str(args.output_dir.absolute())))
if user_response:
save(encoded, args.host_image, args.output_dir)
if __name__ == '__main__':
main()
| mit | -935,809,474,260,038,500 | 30.308411 | 80 | 0.605373 | false |
googleapis/googleapis-gen | google/cloud/notebooks/v1beta1/notebooks-v1beta1-py/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py | 1 | 34147 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.notebooks_v1beta1.types import environment
from google.cloud.notebooks_v1beta1.types import instance
from google.cloud.notebooks_v1beta1.types import service
from google.longrunning import operations_pb2 # type: ignore
from .base import NotebookServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import NotebookServiceGrpcTransport
class NotebookServiceGrpcAsyncIOTransport(NotebookServiceTransport):
"""gRPC AsyncIO backend transport for NotebookService.
API v1beta1 service for Cloud AI Platform Notebooks.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(cls,
host: str = 'notebooks.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs
)
def __init__(self, *,
host: str = 'notebooks.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
self._operations_client = None
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Sanity check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
return self._operations_client
@property
def list_instances(self) -> Callable[
[service.ListInstancesRequest],
Awaitable[service.ListInstancesResponse]]:
r"""Return a callable for the list instances method over gRPC.
Lists instances in a given project and location.
Returns:
Callable[[~.ListInstancesRequest],
Awaitable[~.ListInstancesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_instances' not in self._stubs:
self._stubs['list_instances'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/ListInstances',
request_serializer=service.ListInstancesRequest.serialize,
response_deserializer=service.ListInstancesResponse.deserialize,
)
return self._stubs['list_instances']
@property
def get_instance(self) -> Callable[
[service.GetInstanceRequest],
Awaitable[instance.Instance]]:
r"""Return a callable for the get instance method over gRPC.
Gets details of a single Instance.
Returns:
Callable[[~.GetInstanceRequest],
Awaitable[~.Instance]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_instance' not in self._stubs:
self._stubs['get_instance'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/GetInstance',
request_serializer=service.GetInstanceRequest.serialize,
response_deserializer=instance.Instance.deserialize,
)
return self._stubs['get_instance']
@property
def create_instance(self) -> Callable[
[service.CreateInstanceRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the create instance method over gRPC.
Creates a new Instance in a given project and
location.
Returns:
Callable[[~.CreateInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_instance' not in self._stubs:
self._stubs['create_instance'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/CreateInstance',
request_serializer=service.CreateInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['create_instance']
@property
def register_instance(self) -> Callable[
[service.RegisterInstanceRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the register instance method over gRPC.
Registers an existing legacy notebook instance to the
Notebooks API server. Legacy instances are instances
created with the legacy Compute Engine calls. They are
not manageable by the Notebooks API out of the box. This
call makes these instances manageable by the Notebooks
API.
Returns:
Callable[[~.RegisterInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'register_instance' not in self._stubs:
self._stubs['register_instance'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/RegisterInstance',
request_serializer=service.RegisterInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['register_instance']
@property
def set_instance_accelerator(self) -> Callable[
[service.SetInstanceAcceleratorRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the set instance accelerator method over gRPC.
Updates the guest accelerators of a single Instance.
Returns:
Callable[[~.SetInstanceAcceleratorRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'set_instance_accelerator' not in self._stubs:
self._stubs['set_instance_accelerator'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceAccelerator',
request_serializer=service.SetInstanceAcceleratorRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['set_instance_accelerator']
@property
def set_instance_machine_type(self) -> Callable[
[service.SetInstanceMachineTypeRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the set instance machine type method over gRPC.
Updates the machine type of a single Instance.
Returns:
Callable[[~.SetInstanceMachineTypeRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'set_instance_machine_type' not in self._stubs:
self._stubs['set_instance_machine_type'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceMachineType',
request_serializer=service.SetInstanceMachineTypeRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['set_instance_machine_type']
@property
def set_instance_labels(self) -> Callable[
[service.SetInstanceLabelsRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the set instance labels method over gRPC.
Updates the labels of an Instance.
Returns:
Callable[[~.SetInstanceLabelsRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'set_instance_labels' not in self._stubs:
self._stubs['set_instance_labels'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceLabels',
request_serializer=service.SetInstanceLabelsRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['set_instance_labels']
@property
def delete_instance(self) -> Callable[
[service.DeleteInstanceRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the delete instance method over gRPC.
Deletes a single Instance.
Returns:
Callable[[~.DeleteInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_instance' not in self._stubs:
self._stubs['delete_instance'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/DeleteInstance',
request_serializer=service.DeleteInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['delete_instance']
@property
def start_instance(self) -> Callable[
[service.StartInstanceRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the start instance method over gRPC.
Starts a notebook instance.
Returns:
Callable[[~.StartInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'start_instance' not in self._stubs:
self._stubs['start_instance'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/StartInstance',
request_serializer=service.StartInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['start_instance']
@property
def stop_instance(self) -> Callable[
[service.StopInstanceRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the stop instance method over gRPC.
Stops a notebook instance.
Returns:
Callable[[~.StopInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'stop_instance' not in self._stubs:
self._stubs['stop_instance'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/StopInstance',
request_serializer=service.StopInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['stop_instance']
@property
def reset_instance(self) -> Callable[
[service.ResetInstanceRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the reset instance method over gRPC.
Resets a notebook instance.
Returns:
Callable[[~.ResetInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'reset_instance' not in self._stubs:
self._stubs['reset_instance'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/ResetInstance',
request_serializer=service.ResetInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['reset_instance']
@property
def report_instance_info(self) -> Callable[
[service.ReportInstanceInfoRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the report instance info method over gRPC.
Allows notebook instances to
report their latest instance information to the
Notebooks API server. The server will merge the reported
information to the instance metadata store. Do not use
this method directly.
Returns:
Callable[[~.ReportInstanceInfoRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'report_instance_info' not in self._stubs:
self._stubs['report_instance_info'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/ReportInstanceInfo',
request_serializer=service.ReportInstanceInfoRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['report_instance_info']
@property
def is_instance_upgradeable(self) -> Callable[
[service.IsInstanceUpgradeableRequest],
Awaitable[service.IsInstanceUpgradeableResponse]]:
r"""Return a callable for the is instance upgradeable method over gRPC.
Check if a notebook instance is upgradable.
Returns:
Callable[[~.IsInstanceUpgradeableRequest],
Awaitable[~.IsInstanceUpgradeableResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'is_instance_upgradeable' not in self._stubs:
self._stubs['is_instance_upgradeable'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/IsInstanceUpgradeable',
request_serializer=service.IsInstanceUpgradeableRequest.serialize,
response_deserializer=service.IsInstanceUpgradeableResponse.deserialize,
)
return self._stubs['is_instance_upgradeable']
@property
def upgrade_instance(self) -> Callable[
[service.UpgradeInstanceRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the upgrade instance method over gRPC.
Upgrades a notebook instance to the latest version.
Returns:
Callable[[~.UpgradeInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'upgrade_instance' not in self._stubs:
self._stubs['upgrade_instance'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/UpgradeInstance',
request_serializer=service.UpgradeInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['upgrade_instance']
@property
def upgrade_instance_internal(self) -> Callable[
[service.UpgradeInstanceInternalRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the upgrade instance internal method over gRPC.
Allows notebook instances to
call this endpoint to upgrade themselves. Do not use
this method directly.
Returns:
Callable[[~.UpgradeInstanceInternalRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'upgrade_instance_internal' not in self._stubs:
self._stubs['upgrade_instance_internal'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/UpgradeInstanceInternal',
request_serializer=service.UpgradeInstanceInternalRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['upgrade_instance_internal']
@property
def list_environments(self) -> Callable[
[service.ListEnvironmentsRequest],
Awaitable[service.ListEnvironmentsResponse]]:
r"""Return a callable for the list environments method over gRPC.
Lists environments in a project.
Returns:
Callable[[~.ListEnvironmentsRequest],
Awaitable[~.ListEnvironmentsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_environments' not in self._stubs:
self._stubs['list_environments'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/ListEnvironments',
request_serializer=service.ListEnvironmentsRequest.serialize,
response_deserializer=service.ListEnvironmentsResponse.deserialize,
)
return self._stubs['list_environments']
@property
def get_environment(self) -> Callable[
[service.GetEnvironmentRequest],
Awaitable[environment.Environment]]:
r"""Return a callable for the get environment method over gRPC.
Gets details of a single Environment.
Returns:
Callable[[~.GetEnvironmentRequest],
Awaitable[~.Environment]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_environment' not in self._stubs:
self._stubs['get_environment'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/GetEnvironment',
request_serializer=service.GetEnvironmentRequest.serialize,
response_deserializer=environment.Environment.deserialize,
)
return self._stubs['get_environment']
@property
def create_environment(self) -> Callable[
[service.CreateEnvironmentRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the create environment method over gRPC.
Creates a new Environment.
Returns:
Callable[[~.CreateEnvironmentRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_environment' not in self._stubs:
self._stubs['create_environment'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/CreateEnvironment',
request_serializer=service.CreateEnvironmentRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['create_environment']
@property
def delete_environment(self) -> Callable[
[service.DeleteEnvironmentRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the delete environment method over gRPC.
Deletes a single Environment.
Returns:
Callable[[~.DeleteEnvironmentRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_environment' not in self._stubs:
self._stubs['delete_environment'] = self.grpc_channel.unary_unary(
'/google.cloud.notebooks.v1beta1.NotebookService/DeleteEnvironment',
request_serializer=service.DeleteEnvironmentRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['delete_environment']
__all__ = (
'NotebookServiceGrpcAsyncIOTransport',
)
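# Hedged usage sketch (not part of the generated client library): constructing
# this asyncio transport directly. Most callers let the NotebookService async
# client build the transport for them; the credentials object here is an
# assumption and would normally come from google.auth.default().
def _example_create_transport(credentials=None):
    transport = NotebookServiceGrpcAsyncIOTransport(
        host='notebooks.googleapis.com',
        credentials=credentials,
    )
    # transport.grpc_channel is the aio.Channel used by every stub above
    return transport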
| apache-2.0 | -3,841,256,649,743,030,300 | 44.108322 | 90 | 0.622456 | false |
analogue/mythbox | resources/lib/twisted/twisted/application/strports.py | 10 | 7839 | # -*- test-case-name: twisted.test.test_strports -*-
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Port description language
This module implements a description mini-language for ports, and provides
functions to parse it and to use it to directly construct appropriate
network server services or to directly listen on them.
Here are some examples. They assume the following toy resource and factory
definitions::
class Simple(resource.Resource):
isLeaf = True
def render_GET(self, request):
return "<html>Hello, world!</html>"
class FingerProtocol(protocol.Protocol):
def connectionMade(self):
self.transport.loseConnection()
class FingerFactory(protocol.ServerFactory):
protocol = FingerProtocol
Examples using SSL require a private key and a certificate. If a private key
file name (C{privateKey}) isn't provided, a "server.pem" file is assumed to
exist which contains the private key. If the certificate file name (C{certKey})
isn't provided, the private key file is assumed to contain the certificate as
well::
>>> s=service("80", server.Site(Simple()))
>>> s=service("tcp:80", server.Site(Simple()))
>>> s=service("tcp:80:interface=127.0.0.1", server.Site(Simple()))
>>> s=service("ssl:443", server.Site(Simple()))
>>> s=service("ssl:443:privateKey=mykey.pem", server.Site(Simple()))
>>> s=service("ssl:443:privateKey=mykey.pem:certKey=cert.pem", server.Site(Simple()))
>>> s=service("unix:/var/run/finger", FingerFactory())
>>> s=service("unix:/var/run/finger:mode=660", FingerFactory())
>>> p=listen("80", server.Site(Simple()))
>>> p=listen("tcp:80", server.Site(Simple()))
>>> p=listen("tcp:80:interface=127.0.0.1", server.Site(Simple()))
>>> p=listen("ssl:443", server.Site(Simple()))
>>> p=listen("ssl:443:privateKey=mykey.pem", server.Site(Simple()))
>>> p=listen("ssl:443:privateKey=mykey.pem:certKey=cert.pem", server.Site(Simple()))
>>> p=listen("unix:/var/run/finger", FingerFactory())
>>> p=listen("unix:/var/run/finger:mode=660", FingerFactory())
>>> p=listen("unix:/var/run/finger:lockfile=0", FingerFactory())
See specific function documentation for more information.
Maintainer: Moshe Zadka
"""
from __future__ import generators
def _parseTCP(factory, port, interface="", backlog=50):
return (int(port), factory), {'interface': interface,
'backlog': int(backlog)}
def _parseUNIX(factory, address, mode='666', backlog=50, lockfile=True):
return (
(address, factory),
{'mode': int(mode, 8), 'backlog': int(backlog),
'wantPID': bool(int(lockfile))})
def _parseSSL(factory, port, privateKey="server.pem", certKey=None,
sslmethod=None, interface='', backlog=50):
from twisted.internet import ssl
if certKey is None:
certKey = privateKey
kw = {}
if sslmethod is not None:
kw['sslmethod'] = getattr(ssl.SSL, sslmethod)
cf = ssl.DefaultOpenSSLContextFactory(privateKey, certKey, **kw)
return ((int(port), factory, cf),
{'interface': interface, 'backlog': int(backlog)})
_funcs = {"tcp": _parseTCP,
"unix": _parseUNIX,
"ssl": _parseSSL}
_OP, _STRING = range(2)
def _tokenize(description):
current = ''
ops = ':='
nextOps = {':': ':=', '=': ':'}
description = iter(description)
for n in description:
if n in ops:
yield _STRING, current
yield _OP, n
current = ''
ops = nextOps[n]
elif n=='\\':
current += description.next()
else:
current += n
yield _STRING, current
def _parse(description):
args, kw = [], {}
def add(sofar):
if len(sofar)==1:
args.append(sofar[0])
else:
kw[sofar[0]] = sofar[1]
sofar = ()
for (type, value) in _tokenize(description):
if type is _STRING:
sofar += (value,)
elif value==':':
add(sofar)
sofar = ()
add(sofar)
return args, kw
def parse(description, factory, default=None):
"""
Parse the description of a reliable virtual circuit server (that is, a
TCP port, a UNIX domain socket or an SSL port) and return the data
necessary to call the reactor methods to listen on the given socket with
the given factory.
An argument with no colons means a default port. Usually the default
type is C{tcp}, but passing a non-C{None} value as C{default} will set
that as the default. Otherwise, it is a colon-separated string. The
first part means the type -- currently, it can only be ssl, unix or tcp.
After that, comes a list of arguments. Arguments can be positional or
keyword, and can be mixed. Keyword arguments are indicated by
C{'name=value'}. If a value is supposed to contain a C{':'}, a C{'='} or
a C{'\\'}, escape it with a C{'\\'}.
For TCP, the arguments are the port (port number) and, optionally the
interface (interface on which to listen) and backlog (how many clients
to keep in the backlog).
For UNIX domain sockets, the arguments are address (the file name of the
socket) and optionally the mode (the mode bits of the file, as an octal
number) and the backlog (how many clients to keep in the backlog).
For SSL sockets, the arguments are the port (port number) and,
optionally, the privateKey (file in which the private key is in),
certKey (file in which the certification is in), sslmethod (the name of
the SSL method to allow), the interface (interface on which to listen)
and the backlog (how many clients to keep in the backlog).
@type description: C{str}
@type factory: L{twisted.internet.interfaces.IProtocolFactory}
@type default: C{str} or C{None}
@rtype: C{tuple}
@return: a tuple of string, tuple and dictionary. The string is the name
of the method (sans C{'listen'}) to call, and the tuple and dictionary
are the arguments and keyword arguments to the method.
@raises ValueError: if the string is formatted incorrectly.
@raises KeyError: if the type is other than unix, ssl or tcp.
"""
args, kw = _parse(description)
if not args or (len(args)==1 and not kw):
args[0:0] = [default or 'tcp']
return (args[0].upper(),)+_funcs[args[0]](factory, *args[1:], **kw)
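# Hedged sketch (not part of the original module): concrete values parse() is
# expected to produce given the _parseTCP/_parseSSL helpers above; `factory`
# is passed through untouched.
def _parse_usage_example(factory):
    name, args, kw = parse("tcp:80:interface=127.0.0.1", factory)
    # name == 'TCP', args == (80, factory), kw == {'interface': '127.0.0.1', 'backlog': 50}
    name, args, kw = parse("8080", factory)
    # with no type given, the 'tcp' default applies: args == (8080, factory)
    return name, args, kw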
def service(description, factory, default=None):
"""Return the service corresponding to a description
@type description: C{str}
@type factory: L{twisted.internet.interfaces.IProtocolFactory}
@type default: C{str} or C{None}
@rtype: C{twisted.application.service.IService}
@return: the service corresponding to a description of a reliable
virtual circuit server.
See the documentation of the C{parse} function for description
of the semantics of the arguments.
"""
from twisted.application import internet
name, args, kw = parse(description, factory, default)
return getattr(internet, name+'Server')(*args, **kw)
def listen(description, factory, default=None):
"""Listen on a port corresponding to a description
@type description: C{str}
@type factory: L{twisted.internet.interfaces.IProtocolFactory}
@type default: C{str} or C{None}
@rtype: C{twisted.internet.interfaces.IListeningPort}
@return: the port corresponding to a description of a reliable
virtual circuit server.
See the documentation of the C{parse} function for description
of the semantics of the arguments.
"""
from twisted.internet import reactor
name, args, kw = parse(description, factory, default)
return getattr(reactor, 'listen'+name)(*args, **kw)
__all__ = ['parse', 'service', 'listen']
| gpl-2.0 | 8,508,588,329,813,402,000 | 38.195 | 89 | 0.657099 | false |
tmr232/Gooey | gooey/_tmp/widget_demo.py | 9 | 2979 | '''
Created on Dec 21, 2013
@author: Chris
'''
import sys
import hashlib
from time import time as _time, time
from time import sleep as _sleep
# from argparse import ArgumentParser
# import argparse
import argparse as ap
from argparse import ArgumentParser as AP
from gooey import Gooey
from gooey import GooeyParser
def main():
desc = "Example application to show Gooey's various widgets"
file_help_msg = "Name of the file you want to process"
my_cool_parser = GooeyParser(description=desc)
my_cool_parser.add_argument("filename", help=file_help_msg, widget="FileChooser") # positional
my_cool_parser.add_argument("directory", help="Directory to store output") # positional
my_cool_parser.add_argument('-c', '--countdown', default=2, type=int, help='sets the time to count down from you see its quite simple!')
my_cool_parser.add_argument('-j', '--cron-schedule', type=int, help='Set the datetime when the cron should begin', widget='DateChooser')
my_cool_parser.add_argument("-s", "--showtime", action="store_true", help="display the countdown timer")
my_cool_parser.add_argument("-d", "--delay", action="store_true", help="Delay execution for a bit")
my_cool_parser.add_argument('-v', '--verbose', action='count')
my_cool_parser.add_argument("-o", "--obfuscate", action="store_true", help="obfuscate the countdown timer!")
my_cool_parser.add_argument('-r', '--recursive', choices=['yes', 'no'], help='Recurse into subfolders')
my_cool_parser.add_argument("-w", "--writelog", default="No, NOT whatevs", help="write log to some file or something")
my_cool_parser.add_argument("-e", "--expandAll", action="store_true", help="expand all processes")
verbosity = my_cool_parser.add_mutually_exclusive_group()
verbosity.add_argument('-t', '--verbozze', dest='verbose', action="store_true", help="Show more details")
verbosity.add_argument('-q', '--quiet', dest='quiet', action="store_true", help="Only output on error")
print my_cool_parser._actions
print 'inside of main(), my_cool_parser =', my_cool_parser
args = my_cool_parser.parse_args()
print 'EHOOOOOOOOOOOO'
print sys.argv
print args.countdown
print args.showtime
start_time = _time()
print 'Counting down from %s' % args.countdown
while _time() - start_time < args.countdown:
if args.showtime:
print 'printing message at: %s' % _time()
else:
print 'printing message at: %s' % hashlib.md5(str(_time())).hexdigest()
_sleep(.5)
print 'Finished running the program. Byeeeeesss!'
raise ValueError("Something has gone wrong! AHHHHHHHHHHH")
def here_is_smore():
pass
if __name__ == '__main__':
print sys.argv
main()
# import inspect
# import dis
# # print dir(main.__code__)
# # for i in dir(main.__code__):
# # print i, getattr(main.__code__, i)
# print dis.dis(main.__code__)
# # for i in inspect.getmembers(main):
# # print i
| mit | 3,309,166,735,387,071,000 | 39.375 | 138 | 0.670695 | false |
Joergen/zamboni | mkt/carriers/tests.py | 4 | 2046 | import mock
from nose.tools import eq_
from test_utils import RequestFactory
from amo.tests import TestCase
from amo.urlresolvers import reverse, set_url_prefix
from . import get_carrier, set_carrier, context_processors
from .middleware import CarrierURLMiddleware
class TestCarrierURLs(TestCase):
def setUp(self):
set_carrier(None)
set_url_prefix(None)
def request(self, url):
request = RequestFactory().get(url)
# Simulate the RequestCookiesMiddleware.
request.set_cookie = mock.Mock()
return request
def get(self, url, request=None):
if not request:
request = self.request(url)
CarrierURLMiddleware().process_request(request)
return request
def test_ignore_non_carriers(self):
request = self.get('/not-a-store')
eq_(request.path_info, '/not-a-store')
assert not request.set_cookie.called
def test_set_carrier(self):
request = self.get('/?carrier=telefonica')
eq_(get_carrier(), 'telefonica')
assert request.set_cookie.called
def test_set_carrier_none(self):
request = self.request('/?carrier=')
request.COOKIES = {'carrier': 'telefonica'}
request = self.get('/?carrier=', request)
eq_(get_carrier(), None)
assert request.set_cookie.called
def test_set_carrier_to_none_url(self):
self.get('/telefonica/')
self.get('/not-a-store')
eq_(get_carrier(), None)
self.get('/?carrier=telefonica')
self.get('/?carrier=not-a-store')
eq_(get_carrier(), None)
def test_reverse(self):
self.get('/telefonica/')
eq_(reverse('manifest.webapp'), '/manifest.webapp')
self.get('/?carrier=telefonica')
eq_(reverse('manifest.webapp'), '/manifest.webapp')
def test_context(self):
request = self.get('/?carrier=telefonica')
eq_(request.path_info, '/')
ctx = context_processors.carrier_data(request)
eq_(ctx['CARRIER'], 'telefonica')
| bsd-3-clause | 6,380,424,987,262,263,000 | 29.537313 | 59 | 0.626588 | false |
abstrakraft/rug | rug/project.py | 1 | 28881 | #!/usr/bin/env python
import os
import sys
import xml.dom.minidom
import config
import manifest
import git
import hierarchy
import output
class RugError(StandardError):
pass
class InvalidProjectError(RugError):
pass
RUG_DIR = '.rug'
#TODO: should this be configurable or in the manifest?
RUG_SHA_RIDER = 'refs/rug/sha_rider'
RUG_DEFAULT_DEFAULT = {'revision': 'master', 'vcs': 'git'}
RUG_CONFIG = 'config'
RUG_REPO_CONFIG_SECTION = 'repoconfig'
RUG_CANDIDATE_TEMPLATES = ['%s', '%s/.rug/manifest', '%s/manifest']
class Revset(git.Rev):
@staticmethod
def find_repo(repo_finder):
return repo_finder.manifest_repo
class Project(object):
vcs_class = {}
def __init__(self, project_dir, output_buffer=None):
if output_buffer is None:
output_buffer = output.NullOutputBuffer()
self.output = output_buffer
self.dir = os.path.abspath(project_dir)
#Verify validity
if not self.valid_project(self.dir):
raise InvalidProjectError('not a valid rug project')
#Decide if bare
self.bare = self.valid_bare_project(self.dir)
#Create convenient properties
if self.bare:
self.rug_dir = self.dir
else:
self.rug_dir = os.path.join(self.dir, RUG_DIR)
self.manifest_dir = os.path.join(self.rug_dir, 'manifest')
self.manifest_filename = os.path.join(self.manifest_dir, 'manifest.xml')
self.manifest_repo = git.Repo(self.manifest_dir, output_buffer=self.output.spawn('manifest: '))
self.read_manifest()
def read_manifest(self):
'''Project.read_manifest() -- read the manifest file.'''
(self.remotes, self.repos) = manifest.read(self.manifest_filename, default_default=RUG_DEFAULT_DEFAULT)
if not self.bare:
for path in self.repos:
abs_path = os.path.abspath(os.path.join(self.dir, path))
R = self.vcs_class[self.repos[path]['vcs']]
if R.valid_repo(abs_path):
self.repos[path]['repo'] = R(abs_path, output_buffer=self.output.spawn(path + ': '))
else:
self.repos[path]['repo'] = None
@classmethod
def register_vcs(cls, vcs, vcs_class):
cls.vcs_class[vcs] = vcs_class
@classmethod
def find_project(cls, project_dir=None, output_buffer=None):
'Project.find_project(project_dir=pwd) -> project -- climb up the directory tree looking for a valid rug project'
if project_dir == None:
project_dir = os.getcwd()
head = project_dir
while head:
try:
return cls(head, output_buffer=output_buffer)
except InvalidProjectError:
if head == os.path.sep:
head = None
else:
(head, tail) = os.path.split(head)
raise InvalidProjectError('not a valid rug project')
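	# Illustrative usage (hypothetical path, not from the original source):
	#
	#     p = Project.find_project('/path/to/checkout/some/subdir')
	#     print p.revset().get_short_name()
	#
	# find_project() walks up toward the filesystem root and raises
	# InvalidProjectError if no enclosing rug project is found.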
@classmethod
def init(cls, project_dir, bare=False, output_buffer=None):
'Project.init -- initialize a new rug repository'
if output_buffer is None:
output_buffer = output.NullOutputBuffer()
if project_dir == None:
project_dir = '.'
if cls.valid_project(project_dir):
raise RugError('%s is an existing rug project' % project_dir)
if bare:
rug_dir = project_dir
else:
rug_dir = os.path.join(project_dir, RUG_DIR)
os.makedirs(rug_dir)
config_file = os.path.join(rug_dir, RUG_CONFIG)
open(config_file, 'w').close()
manifest_dir = os.path.join(rug_dir, 'manifest')
manifest_filename = os.path.join(manifest_dir, 'manifest.xml')
mr = git.Repo.init(manifest_dir, output_buffer=output_buffer.spawn('manifest: '))
manifest.write(manifest_filename, {}, {}, {})
mr.add(os.path.basename(manifest_filename))
mr.commit('Initial commit')
return cls(project_dir, output_buffer=output_buffer)
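	# Illustrative usage (hypothetical path): Project.init('/path/to/new/project')
	# creates the .rug directory, an empty config file, and a manifest repo whose
	# initial commit contains an empty manifest.xml.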
@classmethod
def clone(cls, url, project_dir=None, source=None, revset=None, bare=False, repo_config=None, output_buffer=None):
'Project.clone -- clone an existing rug repository'
if output_buffer is None:
output_buffer = output.NullOutputBuffer()
#TODO: more output
#calculate directory
if project_dir == None:
basename = os.path.basename(url)
if len(basename) == 0:
basename = os.path.basename(url[:-1])
project_dir = os.path.splitext(basename)[0]
project_dir = os.path.abspath(project_dir)
#verify directory doesn't exist
if os.path.exists(project_dir):
raise RugError('Directory already exists')
if bare:
rug_dir = project_dir
else:
rug_dir = os.path.join(project_dir, RUG_DIR)
os.makedirs(rug_dir)
config_file = os.path.join(rug_dir, RUG_CONFIG)
open(config_file, 'w').close()
manifest_dir = os.path.join(rug_dir, 'manifest')
manifest_filename = os.path.join(manifest_dir, 'manifest.xml')
#clone manifest repo into rug directory
candidate_urls = map(lambda c: c % url, RUG_CANDIDATE_TEMPLATES)
clone_url = None
for cu in candidate_urls:
if git.Repo.valid_repo(cu, config=repo_config):
clone_url = cu
break
if clone_url:
git.Repo.clone(clone_url, repo_dir=manifest_dir, remote=source, rev=revset,
config=repo_config, output_buffer=output_buffer.spawn('manifest: '))
else:
raise RugError('%s does not seem to be a rug project' % url)
#verify valid manifest
if not os.path.exists(manifest_filename):
raise RugError('invalid manifest repo: no manifest.xml')
output_buffer.append('%s cloned into %s' % (url, project_dir))
#checkout revset
p = cls(project_dir, output_buffer=output_buffer)
if repo_config is not None:
for (name, value) in repo_config.items():
p.set_config(RUG_REPO_CONFIG_SECTION, name, value)
p.checkout(revset)
return p
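	# Illustrative usage (hypothetical URL and arguments, not from the original source):
	#
	#     p = Project.clone('git://example.org/widgets', project_dir='widgets',
	#                       revset='master', bare=False)
	#
	# The manifest repo is cloned into <project_dir>/.rug/manifest (or into
	# project_dir itself for bare projects) and the requested revset is checked out.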
@classmethod
def valid_project(cls, project_dir, include_bare=True):
'Project.valid_project(project_dir) -- verify the minimum qualities necessary to be called a rug project'
return cls.valid_working_project(project_dir) or include_bare and cls.valid_bare_project(project_dir)
@classmethod
def valid_working_project(cls, project_dir):
manifest_dir = os.path.join(project_dir, RUG_DIR, 'manifest')
return git.Repo.valid_repo(manifest_dir) \
and os.path.exists(os.path.join(manifest_dir, 'manifest.xml'))
@classmethod
def valid_bare_project(cls, project_dir):
manifest_dir = os.path.join(project_dir, 'manifest')
return git.Repo.valid_repo(manifest_dir) \
and os.path.exists(os.path.join(manifest_dir, 'manifest.xml'))
def set_config(self, section, name, value):
config_file = os.path.join(self.rug_dir, RUG_CONFIG)
cf = config.ConfigFile.from_path(config_file)
cf.set(section, name, value)
cf.to_path(config_file)
def get_config(self, section, name=None):
config_file = os.path.join(self.rug_dir, RUG_CONFIG)
cf = config.ConfigFile.from_path(config_file)
return cf.get(section, name)
def get_branch_names(self, r):
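		# Returns a dict of ref names used to track repo r under the current revset.
		# Keys (derived from the construction below):
		#   live_porcelain / live_plumbing -- the branch actually checked out
		#   rug / rug_index                -- committed vs. staged manifest state
		#   bookmark / bookmark_index      -- last published/updated upstream point
		#   remote                         -- the upstream ref being tracked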
revision = r.get('revision', 'HEAD')
repo = r['repo']
if revision == 'HEAD':
start = len('refs/remotes/%s/' % r['remote'])
revision = repo.symbolic_ref('refs/remotes/%s/HEAD' % r['remote'])[start:]
ret = {}
if repo.valid_sha(revision):
#TODO: rethink how this works for sha repos
ret['live_porcelain'] = revision
ret['live_plumbing'] = revision
ret['rug'] = 'refs/rug/heads/%s/%s/sha/rug_index' % (self.revset().get_short_name(), r['remote'])
ret['rug_index'] = 'refs/rug/rug_index'
ret['bookmark'] = 'refs/rug/bookmarks/%s/%s/sha/bookmark' % (self.revset().get_short_name(), r['remote'])
ret['bookmark_index'] = 'refs/rug/bookmark_index'
ret['remote'] = revision
else:
ret['live_porcelain'] = revision
ret['live_plumbing'] = 'refs/heads/%s' % revision
ret['rug'] = 'refs/rug/heads/%s/%s/%s' % (self.revset().get_short_name(), r['remote'], revision)
ret['rug_index'] = 'refs/rug/rug_index'
ret['bookmark'] = 'refs/rug/bookmarks/%s/%s/%s' % (self.revset().get_short_name(), r['remote'], revision)
ret['bookmark_index'] = 'refs/rug/bookmark_index'
ret['remote'] = '%s/%s' % (r['remote'], revision)
return ret
def source_list(self):
return self.manifest_repo.remote_list()
def source_add(self, source, url):
return self.manifest_repo.remote_add(source, url)
def source_set_url(self, source, url):
return self.manifest_repo.remote_set_url(source, url)
def source_set_head(self, source):
return self.manifest_repo.remote_set_head(source)
def revset(self):
'return the current revset'
return Revset.cast(self, self.manifest_repo.head())
def revset_list(self):
'return the list of available revsets'
#TODO: refs or branches?
return map(lambda rs: Revset.cast(self, rs), self.manifest_repo.ref_list())
def revset_create(self, dst, src=None):
'create a new revset'
self.manifest_repo.branch_create(dst, src)
def revset_delete(self, dst, force=False):
'delete a revset'
self.manifest_repo.branch_delete(dst, force)
def status(self, porcelain=True, recursive=True):
#TODO: return objects or text?
#TODO: could add manifest status
if self.bare:
raise NotImplementedError('status not implemented for bare projects')
#TODO: think through this
if porcelain:
ret = {}
for r in self.repos.values():
stat = self.repo_status(r['path'])
if recursive:
if stat == 'D':
ret[r['path']] = [stat, None]
else:
r_stat = r['repo'].status(porcelain=True)
ret[r['path']] = [stat, r_stat]
else:
ret[r['path']] = stat
else:
stat = ['On revset %s:' % self.revset().get_short_name()]
diff = self.manifest_repo.diff()
if diff:
stat.append('manifest diff:')
stat.extend(map(lambda line: '\t' + line, self.manifest_repo.diff().split('\n')))
for r in self.repos.values():
repo = r['repo']
if repo is None:
stat.append('repo %s missing' % r['path'])
else:
stat.append('repo %s (%s):' % (r['path'], self.repo_status(r['path'])))
stat.extend(map(lambda line: '\t' + line, r['repo'].status(porcelain=False).split('\n')))
ret = '\n'.join(stat)
return ret
def dirty(self):
#TODO: currently, "dirty" is defined as "would commit -a do anything"
#this seems to work, but needs further consideration
if self.manifest_repo.dirty():
return True
else:
for r in self.repos.values():
if self.repo_status(r['path']):
return True
def repo_status(self, path):
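		# Returns a two-character status code loosely modeled on 'git status
		# --porcelain': the first character is the commit->index (manifest working
		# tree) difference, the second the index->working tree difference.
		# Codes used below: ' ' unchanged, 'A' added, 'D' deleted, 'R' revision
		# changed, 'B' branch definition changed, '??' untracked, '#!@?' unknown.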
#"Index" (manifest working tree) info
index_r = self.repos.get(path)
#Committed revset info
manifest_blob_id = self.manifest_repo.get_blob_id('manifest.xml')
commit_repos = manifest.read_from_string(
self.manifest_repo.show(manifest_blob_id),
default_default=RUG_DEFAULT_DEFAULT
)[1]
commit_r = commit_repos.get(path)
#Working tree info
if index_r:
repo = index_r['repo']
		elif commit_r:
			abs_path = os.path.abspath(os.path.join(self.dir, path))
			R = self.vcs_class[commit_r['vcs']]
if R.valid_repo(abs_path):
repo = R(abs_path, output_buffer=self.output.spawn(path + ': '))
else:
				#not in the index, no repo in the working tree (assume deleted), but present in the commit
return 'D '
elif os.path.exists(path):
#untracked - I think it should be ' A', but git would call it '??'
return '??'
else:
#doesn't exist in any form - how the #!@? did this even get called?
return '#!@?'
#Status1: commit..index diff
if not commit_r:
if index_r:
status1 = 'A'
else:
status1 = ' '
elif not index_r:
status1 = 'D'
elif commit_r['revision'] != index_r['revision']:
status1 = 'R'
else:
status1 = ' '
#Status2: index..working_tree diff
if not index_r:
if repo:
status2 = 'A'
else:
status2 = ' '
elif not repo:
status2 = 'D'
else:
branches = self.get_branch_names(index_r)
head = repo.head()
if repo.valid_sha(index_r['revision']):
#the revision in the manifest could be an abbreviation
if head.get_sha().startswith(index_r['revision']):
status2 = ' '
else:
#Revision changed names: Revision
status2 = 'R'
else:
if (head.get_short_name() != index_r['revision']):
#Revision changed names: Revision
status2 = 'R'
else:
if repo.valid_rev(branches['rug_index']):
index_branch = branches['rug_index']
else:
index_branch = branches['rug']
index_rev = repo.rev_class(repo, index_branch)
if head.get_sha() == index_rev.get_sha():
status2 = ' '
else:
#Branch definition changed: Branch
status2 = 'B'
return status1 + status2
def remote_list(self):
return self.remotes.keys()
def remote_add(self, remote, fetch):
(remotes, repos, default) = manifest.read(self.manifest_filename, apply_default=False)
if not remotes.has_key(remote):
remotes[remote] = {'name':remote}
remotes[remote]['fetch'] = fetch
manifest.write(self.manifest_filename, remotes, repos, default)
self.read_manifest()
self.output.append('remote %s added' % remote)
def default_add(self, field, value):
(remotes, repos, default) = manifest.read(self.manifest_filename, apply_default=False)
default[field] = value
manifest.write(self.manifest_filename, remotes, repos, default)
self.read_manifest()
self.output.append('default added: %s=%s' % (field, value))
def checkout(self, revset=None):
'check out a revset'
#Checkout manifest manifest
if revset is None:
revset = self.revset()
revset = Revset.cast(self, revset)
#Always throw away local rug changes - uncommitted changes to the manifest.xml file are lost
self.manifest_repo.checkout(revset, force=True)
#reread manifest
self.read_manifest()
if not self.bare:
sub_repos = hierarchy.hierarchy(self.repos.keys())
for r in self.repos.values():
url = self.remotes[r['remote']]['fetch'] + '/' + r['name']
#if the repo doesn't exist, clone it
repo = r['repo']
if not repo:
self.create_repo(r, sub_repos[r['path']])
else:
#Verify remotes
if r['remote'] not in repo.remote_list():
repo.remote_add(r['remote'], url)
else:
candidate_urls = map(lambda c: c % url, RUG_CANDIDATE_TEMPLATES)
if repo.config('remote.%s.url' % r['remote']) not in candidate_urls:
							clone_url = None
							try:
								repo_config = self.get_config(RUG_REPO_CONFIG_SECTION)
							except KeyError:
								repo_config = None
							for cu in candidate_urls:
								if git.Repo.valid_repo(cu, config=repo_config):
clone_url = cu
break
if clone_url:
repo.remote_set_url(r['remote'], clone_url)
else:
raise RugError('%s does not seem to be a rug project' % url)
#Fetch from remote
#TODO:decide if we should always do this here. Sometimes have to, since we may not have
#seen this remote before
repo.fetch(r['remote'])
branches = self.get_branch_names(r)
#create rug and bookmark branches if they don't exist
#branches are fully qualified ('refs/...') branch names, so use update_ref
#instead of create_branch
for b in ['rug', 'bookmark']:
if not repo.valid_rev(branches[b]):
repo.update_ref(branches[b], branches['remote'])
for b in ['rug_index', 'bookmark_index']:
if repo.valid_rev(branches[b]):
repo.delete_ref(branches[b])
#create and checkout the live branch
repo.update_ref(branches['live_plumbing'], branches['rug'])
repo.checkout(branches['live_porcelain'])
self.output.append('revset %s checked out' % revset.get_short_name())
def create_repo(self, r, sub_repos):
if self.bare:
raise RugError('Invalid operation for bare project')
abs_path = os.path.abspath(os.path.join(self.dir, r['path']))
url = self.remotes[r['remote']]['fetch'] + '/' + r['name']
R = self.vcs_class[r['vcs']]
try:
config = self.get_config(RUG_REPO_CONFIG_SECTION)
except KeyError:
config = None
repo = R.clone(url, repo_dir=abs_path, remote=r['remote'], rev=r.get('revision', None), config=config, output_buffer=self.output.spawn(r['path'] + ': '))
if r['path'] == '.':
repo.add_ignore(RUG_DIR)
for sr in sub_repos:
repo.add_ignore(os.path.relpath(sr, r['path']))
r['repo'] = repo
branches = self.get_branch_names(r)
for b in ['live_plumbing', 'rug', 'bookmark']:
repo.update_ref(branches[b], branches['remote'])
repo.checkout(branches['live_porcelain'])
def fetch(self, source=None, repos=None):
self.manifest_repo.fetch(source)
if not self.bare:
if repos is None:
repos = self.repos.values()
else:
#TODO: turn list of strings into repos
pass
for r in repos:
repo = r['repo']
if repo:
repo.fetch(r['remote'])
repo.remote_set_head(r['remote'])
#TODO:output
def update(self, recursive=False):
#TODO: implement per repo update
repos = self.repos.values()
#if repos is None:
# repos = self.repos.values()
#else:
# #TODO: turn list of strings into repos
# pass
if self.dirty():
raise RugError('Project has uncommitted changes - commit before updating')
#TODO:update manifest?
sub_repos = hierarchy.hierarchy(self.repos.keys())
for r in repos:
repo = r['repo']
if repo:
#Get Branch names, revs, etc.
branches = self.get_branch_names(r)
head_rev = repo.head()
if not repo.valid_rev(branches['remote']):
self.output.append('remote branch does not exist in %s: no update' % r['path'])
else:
remote_rev = repo.rev_class(repo, branches['remote'])
#We don't touch the bookmark branch here - we refer to bookmark index branch if it exists,
#or bookmark branch if not, and update the bookmark index branch if necessary. Commit updates
#bookmark branch and removes bookmark index
if repo.valid_rev(branches['bookmark_index']):
bookmark_rev = repo.rev_class(repo, branches['bookmark_index'])
elif repo.valid_rev(branches['bookmark']):
bookmark_rev = repo.rev_class(repo, branches['bookmark'])
else:
bookmark_rev = None
#Check if there are no changes
if head_rev.get_sha() == remote_rev.get_sha():
self.output.append('%s is up to date with upstream repo: no update' % r['path'])
elif head_rev.is_descendant(remote_rev):
self.output.append('%s is ahead of upstream repo: no update' % r['path'])
#Fast-Forward if we can
elif head_rev.can_fastforward(remote_rev):
self.output.append('%s is being fast-forward to upstream repo' % r['path'])
repo.merge(remote_rev)
repo.update_ref(branches['bookmark_index'], remote_rev)
#otherwise rebase/merge local work
					elif bookmark_rev and head_rev.is_descendant(bookmark_rev):
#TODO: currently dead code - we check for dirtyness at the top of the function
if repo.dirty():
#TODO: option to stash, rebase, then reapply?
self.output.append('%s has local uncommitted changes and cannot be rebased. Skipping this repo.' % r['path'])
else:
#TODO: option to merge instead of rebase
#TODO: handle merge/rebase conflicts
#TODO: remember if we're in a conflict state
self.output.append('%s is being rebased onto upstream repo' % r['name'])
							[ret,out,err] = repo.rebase(bookmark_rev, onto=branches['remote'])
if ret:
self.output.append(out)
else:
repo.update_ref(branches['bookmark_index'], branches['remote'])
					elif not bookmark_rev:
self.output.append('%s has an unusual relationship with the remote branch, and no bookmark. Skipping this repo.' % r['path'])
#Fail
#TODO: currently dead code - we check for dirtyness at the top of the function
elif head_rev.get_short_name() != r['revision']:
self.output.append('%s has changed branches and cannot be safely updated. Skipping this repo.' % r['path'])
else:
#Weird stuff has happened - right branch, wrong relationship to bookmark
						self.output.append('You are out of your element. The current branch in %s has been altered in an unusual way and must be manually updated.' % r['path'])
else:
repo = self.create_repo(r, sub_repos[r['path']])
				self.output.append('Deleted repo %s checked out' % r['path'])
if recursive:
repo.update(recursive)
def add(self, path, name=None, remote=None, rev=None, vcs=None, use_sha=None):
#TODO:handle lists of dirs
(remotes, repos, default) = manifest.read(self.manifest_filename, apply_default=False)
lookup_default = {}
lookup_default.update(RUG_DEFAULT_DEFAULT)
lookup_default.update(default)
update_rug_branch = False
r = self.repos.get(path, None)
if r is None:
# Validate inputs
if name is None:
raise RugError('new repos must specify a name')
if remote is None:
raise RugError('new repos must specify a remote')
if self.bare:
if rev is None:
raise RugError('new repos in bare projects must specify a rev')
if vcs is None:
raise RugError('new repos in bare projects must specify a vcs')
if self.bare:
#Can't really test/validate anything here since there's no repo
#Hope the user knows what they're doing
#Add the repo
repos[path] = {'path': path}
revision = rev
else:
if r is None:
#New repository
#Find vcs if not specified, and create repo object
abs_path = os.path.join(self.dir, path)
if vcs is None:
repo = None
#TODO: rug needs to take priority here, as rug repos with sub-repos at '.'
#will look like the sub-repo vcs as well as a rug repo
#(but not if the path of the sub-repo is '.')
for (try_vcs, R) in self.vcs_class.items():
if R.valid_repo(abs_path):
repo = R(abs_path, output_buffer=self.output.spawn(path + ': '))
vcs = try_vcs
break
if repo is None:
raise RugError('unrecognized repo %s' % path)
else:
repo = self.vcs_class[vcs](abs_path, output_buffer=self.output.spawn(path + ': '))
#Add the repo
repos[path] = {'path': path}
#TODO: we don't know if the remote even exists yet, so can't set up all branches
#logic elsewhere should be able to handle this possibility (remote & bookmark branches don't exist)
update_rug_branch = True
#TODO: should this be required? If not, what should the default be?
if use_sha is None:
use_sha = False
else:
#Modify existing repo
repo = r['repo']
#TODO: rethink this condition
if remote is not None:
update_rug_branch = True
#If use_sha is not specified, look at existing manifest revision
if use_sha is None:
use_sha = repo.valid_sha(r.get('revision', lookup_default['revision']))
#Get the rev
if rev is None:
rev = repo.head()
else:
rev = repo.rev_class.cast(repo, rev)
if use_sha:
rev = repo.rev_class(repo, rev.get_sha())
revision = rev.get_short_name()
#Update repo properties
for p in ['revision', 'name', 'remote', 'vcs']:
pval = locals()[p]
if (pval is not None) and (pval != lookup_default.get(p)):
repos[path][p] = pval
#Write the manifest and reload repos
manifest.write(self.manifest_filename, remotes, repos, default)
self.read_manifest()
if not self.bare:
r = self.repos[path]
repo = r['repo']
branches = self.get_branch_names(r)
#Update rug_index
repo.update_ref(branches['rug_index'], rev)
#If this is a new repo, set the rug branch
if update_rug_branch:
repo.update_ref(branches['rug'], rev)
self.output.append("%s added to manifest" % path)
def remove(self, path):
"""
Remove a repo from the manifest
"""
(remotes, repos, default) = manifest.read(self.manifest_filename, apply_default=False)
lookup_default = {}
lookup_default.update(RUG_DEFAULT_DEFAULT)
lookup_default.update(default)
if path not in repos:
raise RugError('unrecognized repo %s' % path)
del(repos[path])
manifest.write(self.manifest_filename, remotes, repos, default)
self.read_manifest()
self.output.append("%s removed from manifest" % path)
def commit(self, message=None, all=False, recursive=False):
if not self.bare:
for r in self.repos.values():
repo = r['repo']
if all:
#commit if needed
if recursive and repo.dirty():
if message is None:
raise RugError('commit message required')
repo.commit(message, all=True)
#add if needed
status = self.repo_status(r['path'])
if ('B' in status) or ('R' in status):
self.add(r['path'])
r = self.repos[r['path']]
branches = self.get_branch_names(r)
if repo.valid_rev(branches['rug_index']):
repo.update_ref(branches['rug'], branches['rug_index'])
repo.delete_ref(branches['rug_index'])
if repo.valid_rev(branches['bookmark_index']):
repo.update_ref(branches['bookmark'], branches['bookmark_index'])
repo.delete_ref(branches['bookmark_index'])
#TODO: what about untracked files?
if self.manifest_repo.dirty():
if message is None:
raise RugError('commit message required')
self.manifest_repo.commit(message, all=True)
self.output.append("committed revset %s to %s" % (self.revset().get_short_name(), self.dir))
#TODO: remove this quick hack
def test_publish(self, remote=None):
return self.publish(remote, test=True)
def publish(self, source=None, test=False):
if source is None:
source = 'origin'
if not source in self.source_list():
raise RugError('unrecognized source %s' % source)
#TODO: This may not be the best way to do this, but we need the manifest
#as of the last commit.
do_manifest_stash_pop = False
if self.manifest_repo.dirty():
do_manifest_stash_pop = True
self.manifest_repo.stash()
self.read_manifest()
#TODO: use manifest.read with apply_default=False
error = []
#Verify that we can push to all unpublished remotes
ready = True
repo_updates = []
if not self.bare:
for r in self.repos.values():
repo = r['repo']
branches = self.get_branch_names(r)
update_repo = False
if not repo.valid_rev(branches['remote']):
update_repo = True
else:
rug_rev = repo.rev_class(repo, branches['rug'])
remote_rev = repo.rev_class(repo, branches['remote'])
update_repo = rug_rev.get_sha() != remote_rev.get_sha()
if update_repo:
#TODO: verify correctness & consistency of path functions/formats throughout rug
if repo.valid_sha(r['revision']):
#TODO: PROBLEM: branches pushed as sha_riders may not have heads associated with them,
#which means that clones won't pull them down
refspec = '%s:refs/heads/%s' % (r['revision'], RUG_SHA_RIDER)
force = True
else:
refspec = '%s:refs/heads/%s' % (branches['rug'], r['revision'])
force = False
repo_updates.append((r, refspec, force))
if not repo.test_push(r['remote'], refspec, force=force):
error.append('%s: %s cannot be pushed to %s' % (r['name'], r['revision'], r['remote']))
ready = False
#Verify that we can push to manifest repo
#TODO: We don't always need to push manifest repo
manifest_revision = self.revset()
if manifest_revision.is_sha():
manifest_refspec = '%s:refs/heads/%s' % (manifest_revision.get_sha(), RUG_SHA_RIDER)
manifest_force = True
else:
manifest_refspec = manifest_revision.get_short_name()
manifest_force = False
if not self.manifest_repo.test_push(source, manifest_refspec, force=manifest_force):
error.append('manifest branch %s cannot be pushed to %s' % (manifest_revision.get_short_name(), source))
ready = False
if test:
return ready
#Error if we can't publish anything
if not ready:
raise RugError('\n'.join(error))
#Push unpublished remotes
for (r, refspec, force) in repo_updates:
repo = r['repo']
repo.push(r['remote'], refspec, force)
branches = self.get_branch_names(r)
repo.update_ref(branches['bookmark'], branches['rug'])
self.output.append('%s: pushed %s to %s' % (r['name'], r['revision'], r['remote']))
#Push manifest
#TODO: we've taken steps to predict errors, but failure can still happen. Need to
#leave the repo in a consistent state if that happens
self.manifest_repo.push(source, manifest_refspec, force=manifest_force)
self.output.append('manifest branch %s pushed to %s' % (manifest_revision.get_short_name(), source))
if do_manifest_stash_pop:
self.manifest_repo.stash_pop()
self.read_manifest()
#TODO: define precisely what this should do
#def reset(self, optlist=[], repos=None):
# if repos is None:
# repos = self.repos
# rug_branch = 'rug/%s/%s' % (self.origin(), self.revset())
#
# if optlist.has_key('soft'):
# mode = git.Repo.SOFT
# elif optlist.has_key('mixed'):
# mode = git.Repo.MIXED
# elif optlist.has_key('hard'):
# mode = git.Repo.HARD
#
# for r in repos:
# r.checkout(rug_branch, mode = mode)
Project.register_vcs('git', git.Repo)
#The following code was necessary before manual clone was implemented in order to
#clone into a non-empty directory
#if os.path.exists(path) and (not os.path.isdir(path)):
# RugError('path %s already exists and is not a directory' % (path,))
#elif os.path.isdir(path) and (os.listdir(path) != []):
# tmp_path = tempfile(dir='.')
# #todo: proper path join (detect foreign OS)
# repo = git.Repo.clone(self.remotes[p['remote']]['fetch'] + '/' + p['name'], tmp_path)
# #move tmp_path to path
# #rmdir tmp_path
#else:
# #path is an empty directory, or doesn't exist
# #todo: proper path join (detect foreign OS)
| gpl-3.0 | -1,158,218,370,191,547,100 | 32.082474 | 161 | 0.665836 | false |
jjmleiro/hue | desktop/core/src/desktop/migrations/0014_auto__add_unique_document_content_type_object_id.py | 8 | 10363 | # -*- coding: utf-8 -*-
import logging
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models, transaction
from desktop.models import Document
class Migration(SchemaMigration):
def forwards(self, orm):
# If there are duplicated documents, we'll have an error when we try to
# create this index. So to protect against that, we should delete those
# documents before we create the index.
# We need to wrap the data migration and alter operation in separate transactions for PostgreSQL
# See: http://south.readthedocs.org/en/latest/migrationstructure.html#transactions
try:
db.start_transaction()
duplicated_records = Document.objects \
.values('content_type_id', 'object_id') \
.annotate(id_count=models.Count('id')) \
.filter(id_count__gt=1)
# Delete all but the first document.
for record in duplicated_records:
docs = Document.objects \
.values_list('id', flat=True) \
.filter(
content_type_id=record['content_type_id'],
object_id=record['object_id'],
)[1:]
docs = list(docs)
logging.warn('Deleting documents %s' % docs)
Document.objects.filter(id__in=docs).delete()
db.commit_transaction()
except Exception, e:
db.rollback_transaction()
raise e
try:
db.start_transaction()
# Adding unique constraint on 'Document', fields ['content_type', 'object_id']
db.create_unique(u'desktop_document', ['content_type_id', 'object_id'])
db.commit_transaction()
except Exception, e:
db.rollback_transaction()
raise e
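        # For reference, the create_unique() call below amounts to something like
        # the following SQL (illustrative only; the exact statement varies by
        # database backend):
        #
        #   ALTER TABLE desktop_document
        #       ADD CONSTRAINT <some_name> UNIQUE (content_type_id, object_id);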
def backwards(self, orm):
# Removing unique constraint on 'Document', fields ['content_type', 'object_id']
db.delete_unique(u'desktop_document', ['content_type_id', 'object_id'])
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'desktop.document': {
'Meta': {'unique_together': "(('content_type', 'object_id'),)", 'object_name': 'Document'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''"}),
'extra': ('django.db.models.fields.TextField', [], {'default': "''"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'doc_owner'", 'to': u"orm['auth.User']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['desktop.DocumentTag']", 'db_index': 'True', 'symmetrical': 'False'}),
'version': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'})
},
u'desktop.document2': {
'Meta': {'object_name': 'Document2'},
'data': ('django.db.models.fields.TextField', [], {'default': "'{}'"}),
'dependencies': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'dependencies_rel_+'", 'db_index': 'True', 'to': u"orm['desktop.Document2']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''"}),
'extra': ('django.db.models.fields.TextField', [], {'default': "''"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_history': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'doc2_owner'", 'to': u"orm['auth.User']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'tags_rel_+'", 'db_index': 'True', 'to': u"orm['desktop.Document2']"}),
'type': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'db_index': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'d85c2c3a-b9ee-494d-a946-702719c129c4'", 'max_length': '36', 'db_index': 'True'}),
'version': ('django.db.models.fields.SmallIntegerField', [], {'default': '1', 'db_index': 'True'})
},
u'desktop.documentpermission': {
'Meta': {'object_name': 'DocumentPermission'},
'doc': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['desktop.Document']"}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'db_index': 'True', 'to': u"orm['auth.Group']", 'db_table': "'documentpermission_groups'", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'perms': ('django.db.models.fields.CharField', [], {'default': "'read'", 'max_length': '10'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'db_index': 'True', 'to': u"orm['auth.User']", 'db_table': "'documentpermission_users'", 'symmetrical': 'False'})
},
u'desktop.documenttag': {
'Meta': {'unique_together': "(('owner', 'tag'),)", 'object_name': 'DocumentTag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'tag': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
},
u'desktop.settings': {
'Meta': {'object_name': 'Settings'},
'collect_usage': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tours_and_tutorials': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'})
},
u'desktop.userpreferences': {
'Meta': {'object_name': 'UserPreferences'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'value': ('django.db.models.fields.TextField', [], {'max_length': '4096'})
}
}
complete_apps = ['desktop']
| apache-2.0 | -6,683,750,418,178,323,000 | 65.429487 | 195 | 0.555438 | false |
vipul-sharma20/oh-mainline | vendor/packages/Django/django/contrib/auth/tests/hashers.py | 28 | 7337 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.global_settings import PASSWORD_HASHERS as default_hashers
from django.contrib.auth.hashers import (is_password_usable,
check_password, make_password, PBKDF2PasswordHasher, load_hashers,
PBKDF2SHA1PasswordHasher, get_hasher, identify_hasher, UNUSABLE_PASSWORD)
from django.utils import unittest
from django.utils.unittest import skipUnless
try:
import crypt
except ImportError:
crypt = None
try:
import bcrypt
# Django 1.5 works only with py-bcrypt, not with bcrypt. py-bcrypt has
# '_bcrypt' attribute, bcrypt doesn't.
if not hasattr(bcrypt, '_bcrypt'):
bcrypt = None
except ImportError:
bcrypt = None
class TestUtilsHashPass(unittest.TestCase):
def setUp(self):
load_hashers(password_hashers=default_hashers)
def test_simple(self):
encoded = make_password('lètmein')
self.assertTrue(encoded.startswith('pbkdf2_sha256$'))
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password('lètmein', encoded))
self.assertFalse(check_password('lètmeinz', encoded))
def test_pkbdf2(self):
encoded = make_password('lètmein', 'seasalt', 'pbkdf2_sha256')
self.assertEqual(encoded,
'pbkdf2_sha256$10000$seasalt$CWWFdHOWwPnki7HvkcqN9iA2T3KLW1cf2uZ5kvArtVY=')
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password('lètmein', encoded))
self.assertFalse(check_password('lètmeinz', encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "pbkdf2_sha256")
def test_sha1(self):
encoded = make_password('lètmein', 'seasalt', 'sha1')
self.assertEqual(encoded,
'sha1$seasalt$cff36ea83f5706ce9aa7454e63e431fc726b2dc8')
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password('lètmein', encoded))
self.assertFalse(check_password('lètmeinz', encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "sha1")
def test_md5(self):
encoded = make_password('lètmein', 'seasalt', 'md5')
self.assertEqual(encoded,
'md5$seasalt$3f86d0d3d465b7b458c231bf3555c0e3')
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password('lètmein', encoded))
self.assertFalse(check_password('lètmeinz', encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "md5")
def test_unsalted_md5(self):
encoded = make_password('lètmein', '', 'unsalted_md5')
self.assertEqual(encoded, '88a434c88cca4e900f7874cd98123f43')
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password('lètmein', encoded))
self.assertFalse(check_password('lètmeinz', encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "unsalted_md5")
# Alternate unsalted syntax
alt_encoded = "md5$$%s" % encoded
self.assertTrue(is_password_usable(alt_encoded))
self.assertTrue(check_password('lètmein', alt_encoded))
self.assertFalse(check_password('lètmeinz', alt_encoded))
def test_unsalted_sha1(self):
encoded = make_password('lètmein', '', 'unsalted_sha1')
self.assertEqual(encoded, 'sha1$$6d138ca3ae545631b3abd71a4f076ce759c5700b')
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password('lètmein', encoded))
self.assertFalse(check_password('lètmeinz', encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "unsalted_sha1")
# Raw SHA1 isn't acceptable
alt_encoded = encoded[6:]
self.assertFalse(check_password('lètmein', alt_encoded))
@skipUnless(crypt, "no crypt module to generate password.")
def test_crypt(self):
encoded = make_password('lètmei', 'ab', 'crypt')
self.assertEqual(encoded, 'crypt$$ab1Hv2Lg7ltQo')
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password('lètmei', encoded))
self.assertFalse(check_password('lètmeiz', encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "crypt")
@skipUnless(bcrypt, "py-bcrypt not installed")
def test_bcrypt(self):
encoded = make_password('lètmein', hasher='bcrypt')
self.assertTrue(is_password_usable(encoded))
self.assertTrue(encoded.startswith('bcrypt$'))
self.assertTrue(check_password('lètmein', encoded))
self.assertFalse(check_password('lètmeinz', encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "bcrypt")
def test_unusable(self):
encoded = make_password(None)
self.assertFalse(is_password_usable(encoded))
self.assertFalse(check_password(None, encoded))
self.assertFalse(check_password(UNUSABLE_PASSWORD, encoded))
self.assertFalse(check_password('', encoded))
self.assertFalse(check_password('lètmein', encoded))
self.assertFalse(check_password('lètmeinz', encoded))
self.assertRaises(ValueError, identify_hasher, encoded)
def test_bad_algorithm(self):
def doit():
make_password('lètmein', hasher='lolcat')
self.assertRaises(ValueError, doit)
self.assertRaises(ValueError, identify_hasher, "lolcat$salt$hash")
def test_bad_encoded(self):
self.assertFalse(is_password_usable('lètmein_badencoded'))
self.assertFalse(is_password_usable(''))
def test_low_level_pkbdf2(self):
hasher = PBKDF2PasswordHasher()
encoded = hasher.encode('lètmein', 'seasalt')
self.assertEqual(encoded,
'pbkdf2_sha256$10000$seasalt$CWWFdHOWwPnki7HvkcqN9iA2T3KLW1cf2uZ5kvArtVY=')
self.assertTrue(hasher.verify('lètmein', encoded))
def test_low_level_pbkdf2_sha1(self):
hasher = PBKDF2SHA1PasswordHasher()
encoded = hasher.encode('lètmein', 'seasalt')
self.assertEqual(encoded,
'pbkdf2_sha1$10000$seasalt$oAfF6vgs95ncksAhGXOWf4Okq7o=')
self.assertTrue(hasher.verify('lètmein', encoded))
def test_upgrade(self):
self.assertEqual('pbkdf2_sha256', get_hasher('default').algorithm)
for algo in ('sha1', 'md5'):
encoded = make_password('lètmein', hasher=algo)
state = {'upgraded': False}
def setter(password):
state['upgraded'] = True
self.assertTrue(check_password('lètmein', encoded, setter))
self.assertTrue(state['upgraded'])
def test_no_upgrade(self):
encoded = make_password('lètmein')
state = {'upgraded': False}
def setter():
state['upgraded'] = True
self.assertFalse(check_password('WRONG', encoded, setter))
self.assertFalse(state['upgraded'])
def test_no_upgrade_on_incorrect_pass(self):
self.assertEqual('pbkdf2_sha256', get_hasher('default').algorithm)
for algo in ('sha1', 'md5'):
encoded = make_password('lètmein', hasher=algo)
state = {'upgraded': False}
def setter():
state['upgraded'] = True
self.assertFalse(check_password('WRONG', encoded, setter))
self.assertFalse(state['upgraded'])
| agpl-3.0 | 1,080,996,786,189,361,900 | 42.440476 | 87 | 0.666758 | false |
datascopeanalytics/chicago-new-business | src/figs.py | 1 | 1358 | import seaborn as sns
import matplotlib.pyplot as plt
class FlowOverTime(object):
def __init__(self, year_range, new_counts, old_counts):
diff_counts = [new + old for new, old in zip(new_counts, old_counts)]
sns.set(style="white", context="talk")
palette = sns.color_palette(palette='Set1')
figure, axis = plt.subplots()
# create a barplot
bar_width = 0.8
bar_year_range = [year - bar_width/2 for year in year_range]
axis.bar(bar_year_range, new_counts, width=bar_width, color=palette[1], edgecolor=palette[1], label='new')
axis.bar(bar_year_range, old_counts, width=bar_width, color=palette[0], edgecolor=palette[0], label='expired')
# add a line for the differences
axis.plot(year_range, diff_counts, color='black', linewidth=2, label='change')
# specify the domain
axis.set_xlim(year_range[0]-1, year_range[-1]+1)
ylim = axis.get_ylim()
yabs = max(map(abs, ylim))
axis.set_ylim(-yabs, yabs)
axis.set_autoscale_on(False)
# labels
axis.legend()
# axis.set_xlabel("year")
axis.set_ylabel("business licenses")
# remove crud on axes
sns.despine(bottom=True)
def save(self, filename):
plt.savefig(filename)
def close(self):
plt.close()
| unlicense | -8,651,641,988,866,933,000 | 29.863636 | 118 | 0.607511 | false |
jonparrott/google-cloud-python | asset/noxfile.py | 2 | 3868 | # -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import nox
LOCAL_DEPS = (os.path.join("..", "api_core"),)
def default(session):
# Install all test dependencies, then install this package in-place.
session.install("mock", "pytest", "pytest-cov")
for local_dep in LOCAL_DEPS:
session.install("-e", local_dep)
session.install("-e", ".")
# Run py.test against the unit tests.
session.run(
"py.test",
"--quiet",
"--cov=google.cloud",
"--cov=tests.unit",
"--cov-append",
"--cov-config=.coveragerc",
"--cov-report=",
"--cov-fail-under=86",
os.path.join("tests", "unit"),
*session.posargs,
)
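# Illustrative invocations (assuming nox is installed; session names follow
# nox's <name>-<python> convention for parametrized sessions):
#
#   nox -s unit-3.7       # unit tests under Python 3.7
#   nox -s system-2.7     # system tests (requires GOOGLE_APPLICATION_CREDENTIALS)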
@nox.session(python=["2.7", "3.5", "3.6", "3.7"])
def unit(session):
"""Run the unit test suite."""
default(session)
@nox.session(python=["2.7", "3.7"])
def system(session):
"""Run the system test suite."""
system_test_path = os.path.join("tests", "system.py")
# Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable")
# Sanity check: only run tests if found.
if not os.path.exists(system_test_path):
session.skip("System tests were not found")
# Use pre-release gRPC for system tests.
session.install("--pre", "grpcio")
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
session.install("mock", "pytest")
for local_dep in LOCAL_DEPS:
session.install("-e", local_dep)
session.install("-e", "../test_utils/")
session.install("-e", ".")
# Run py.test against the system tests.
session.run("py.test", "--quiet", system_test_path, *session.posargs)
@nox.session(python="3.7")
def blacken(session):
"""Run black.
Format code to uniform standard.
"""
session.install("black")
session.run(
"black",
"google",
"tests",
"docs",
"--exclude",
".*/proto/.*|.*/gapic/.*|.*/.*_pb2.py",
)
@nox.session(python="3.7")
def lint(session):
"""Run linters.
Returns a failure if the linters find linting errors or sufficiently
serious code quality issues.
"""
session.install("flake8", "black", *LOCAL_DEPS)
session.install(".")
session.run(
"black",
"--check",
"google",
"tests",
"docs",
"--exclude",
".*/proto/.*|.*/gapic/.*|.*/.*_pb2.py",
)
session.run("flake8", "google", "tests")
@nox.session(python="3.7")
def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
session.install("docutils", "pygments")
session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
@nox.session(python="3.7")
def cover(session):
"""Run the final coverage report.
This outputs the coverage report aggregating coverage from the unit
test runs (not system test runs), and then erases coverage data.
"""
session.install("coverage", "pytest-cov")
session.run("coverage", "report", "--show-missing", "--fail-under=85")
session.run("coverage", "erase")
| apache-2.0 | 5,953,768,127,517,501,000 | 27.651852 | 80 | 0.619959 | false |
hospace/ToughRADIUS | toughradius/radiusd/plugins/acct_update_process.py | 4 | 1626 | #!/usr/bin/env python
#coding=utf-8
from twisted.python import log
from toughradius.radiusd.pyrad import packet
from toughradius.radiusd.settings import *
from toughradius.radiusd import utils
import logging
import datetime
def process(req=None,user=None,radiusd=None,**kwargs):
if not req.get_acct_status_type() == STATUS_TYPE_UPDATE:
return
if not user:
        return log.err("[Acct] Received an accounting update request but user [%s] does not exist"%req.get_user_name())
runstat=radiusd.runstat
store = radiusd.store
runstat.acct_update += 1
online = store.get_online(req.get_nas_addr(),req.get_acct_sessionid())
if not online:
sessiontime = req.get_acct_sessiontime()
updatetime = datetime.datetime.now()
_starttime = updatetime - datetime.timedelta(seconds=sessiontime)
online = utils.Storage(
account_number = user['account_number'],
nas_addr = req.get_nas_addr(),
acct_session_id = req.get_acct_sessionid(),
acct_start_time = _starttime.strftime( "%Y-%m-%d %H:%M:%S"),
framed_ipaddr = req.get_framed_ipaddr(),
mac_addr = req.get_mac_addr(),
nas_port_id = req.get_nas_portid(),
billing_times = req.get_acct_sessiontime(),
input_total = req.get_input_total(),
output_total = req.get_output_total(),
start_source = STATUS_TYPE_UPDATE
)
store.add_online(online)
log.msg('%s Accounting update request, update online'%req.get_user_name(),level=logging.INFO)
| agpl-3.0 | -4,266,075,051,683,850,000 | 36.837209 | 120 | 0.623616 | false |
dhomeier/astropy | astropy/io/fits/scripts/fitsdiff.py | 8 | 12904 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import argparse
import glob
import logging
import os
import sys
from astropy.io import fits
from astropy.io.fits.util import fill
from astropy import __version__
log = logging.getLogger('fitsdiff')
DESCRIPTION = """
Compare two FITS image files and report the differences in header keywords and
data.
fitsdiff [options] filename1 filename2
where filename1 filename2 are the two files to be compared. They may also be
wild cards, in such cases, they must be enclosed by double or single quotes, or
they may be directory names. If both are directory names, all files in each of
the directories will be included; if only one is a directory name, then the
directory name will be prefixed to the file name(s) specified by the other
argument. for example::
fitsdiff "*.fits" "/machine/data1"
will compare all FITS files in the current directory to the corresponding files
in the directory /machine/data1.
This script is part of the Astropy package. See
https://docs.astropy.org/en/latest/io/fits/usage/scripts.html#fitsdiff
for further documentation.
""".strip()
EPILOG = fill("""
If the two files are identical within the specified conditions, it will report
"No difference is found." If the value(s) of -c and -k takes the form
'@filename', list is in the text file 'filename', and each line in that text
file contains one keyword.
Example
-------
fitsdiff -k filename,filtnam1 -n 5 -r 1.e-6 test1.fits test2
This command will compare files test1.fits and test2.fits, report maximum of 5
different pixels values per extension, only report data values larger than
1.e-6 relative to each other, and will neglect the different values of keywords
FILENAME and FILTNAM1 (or their very existence).
fitsdiff command-line arguments can also be set using the environment variable
FITSDIFF_SETTINGS. If the FITSDIFF_SETTINGS environment variable is present,
each argument present will override the corresponding argument on the
command-line unless the --exact option is specified. The FITSDIFF_SETTINGS
environment variable exists to make it easier to change the
behavior of fitsdiff on a global level, such as in a set of regression tests.
""".strip(), width=80)
class StoreListAction(argparse.Action):
def __init__(self, option_strings, dest, nargs=None, **kwargs):
if nargs is not None:
raise ValueError("nargs not allowed")
super().__init__(option_strings, dest, nargs, **kwargs)
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, [])
# Accept either a comma-separated list or a filename (starting with @)
# containing a value on each line
if values and values[0] == '@':
value = values[1:]
if not os.path.exists(value):
log.warning(f'{self.dest} argument {value} does not exist')
return
try:
values = [v.strip() for v in open(value, 'r').readlines()]
setattr(namespace, self.dest, values)
except OSError as exc:
log.warning('reading {} for {} failed: {}; ignoring this '
'argument'.format(value, self.dest, exc))
del exc
else:
setattr(namespace, self.dest,
[v.strip() for v in values.split(',')])
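# For example (hypothetical invocations), options using this action accept either
# form described in the epilog:
#
#   fitsdiff -k FILENAME,FILTNAM1 test1.fits test2.fits
#   fitsdiff -k @keywords.txt test1.fits test2.fits   # one keyword per line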
def handle_options(argv=None):
parser = argparse.ArgumentParser(
description=DESCRIPTION, epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
'--version', action='version',
version=f'%(prog)s {__version__}')
parser.add_argument(
'fits_files', metavar='file', nargs='+',
help='.fits files to process.')
parser.add_argument(
'-q', '--quiet', action='store_true',
help='Produce no output and just return a status code.')
parser.add_argument(
'-n', '--num-diffs', type=int, default=10, dest='numdiffs',
metavar='INTEGER',
help='Max number of data differences (image pixel or table element) '
'to report per extension (default %(default)s).')
parser.add_argument(
'-r', '--rtol', '--relative-tolerance', type=float, default=None,
dest='rtol', metavar='NUMBER',
help='The relative tolerance for comparison of two numbers, '
'specifically two floating point numbers. This applies to data '
'in both images and tables, and to floating point keyword values '
'in headers (default %(default)s).')
parser.add_argument(
'-a', '--atol', '--absolute-tolerance', type=float, default=None,
dest='atol', metavar='NUMBER',
help='The absolute tolerance for comparison of two numbers, '
'specifically two floating point numbers. This applies to data '
'in both images and tables, and to floating point keyword values '
'in headers (default %(default)s).')
parser.add_argument(
'-b', '--no-ignore-blanks', action='store_false',
dest='ignore_blanks', default=True,
help="Don't ignore trailing blanks (whitespace) in string values. "
"Otherwise trailing blanks both in header keywords/values and in "
"table column values) are not treated as significant i.e., "
"without this option 'ABCDEF ' and 'ABCDEF' are considered "
"equivalent. ")
parser.add_argument(
'--no-ignore-blank-cards', action='store_false',
dest='ignore_blank_cards', default=True,
help="Don't ignore entirely blank cards in headers. Normally fitsdiff "
"does not consider blank cards when comparing headers, but this "
"will ensure that even blank cards match up. ")
parser.add_argument(
'--exact', action='store_true',
dest='exact_comparisons', default=False,
help="Report ALL differences, "
"overriding command-line options and FITSDIFF_SETTINGS. ")
parser.add_argument(
'-o', '--output-file', metavar='FILE',
help='Output results to this file; otherwise results are printed to '
'stdout.')
parser.add_argument(
'-u', '--ignore-hdus', action=StoreListAction,
default=[], dest='ignore_hdus',
metavar='HDU_NAMES',
help='Comma-separated list of HDU names not to be compared. HDU '
'names may contain wildcard patterns.')
group = parser.add_argument_group('Header Comparison Options')
group.add_argument(
'-k', '--ignore-keywords', action=StoreListAction,
default=[], dest='ignore_keywords',
metavar='KEYWORDS',
help='Comma-separated list of keywords not to be compared. Keywords '
'may contain wildcard patterns. To exclude all keywords, use '
'"*"; make sure to have double or single quotes around the '
'asterisk on the command-line.')
group.add_argument(
'-c', '--ignore-comments', action=StoreListAction,
default=[], dest='ignore_comments',
metavar='COMMENTS',
help='Comma-separated list of keywords whose comments will not be '
'compared. Wildcards may be used as with --ignore-keywords.')
group = parser.add_argument_group('Table Comparison Options')
group.add_argument(
'-f', '--ignore-fields', action=StoreListAction,
default=[], dest='ignore_fields',
metavar='COLUMNS',
help='Comma-separated list of fields (i.e. columns) not to be '
'compared. All columns may be excluded using "*" as with '
'--ignore-keywords.')
options = parser.parse_args(argv)
# Determine which filenames to compare
if len(options.fits_files) != 2:
parser.error('\nfitsdiff requires two arguments; '
'see `fitsdiff --help` for more details.')
return options
def setup_logging(outfile=None):
log.setLevel(logging.INFO)
error_handler = logging.StreamHandler(sys.stderr)
error_handler.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
error_handler.setLevel(logging.WARNING)
log.addHandler(error_handler)
if outfile is not None:
output_handler = logging.FileHandler(outfile)
else:
output_handler = logging.StreamHandler()
class LevelFilter(logging.Filter):
"""Log only messages matching the specified level."""
def __init__(self, name='', level=logging.NOTSET):
logging.Filter.__init__(self, name)
self.level = level
def filter(self, rec):
return rec.levelno == self.level
# File output logs all messages, but stdout logs only INFO messages
# (since errors are already logged to stderr)
output_handler.addFilter(LevelFilter(level=logging.INFO))
output_handler.setFormatter(logging.Formatter('%(message)s'))
log.addHandler(output_handler)
def match_files(paths):
if os.path.isfile(paths[0]) and os.path.isfile(paths[1]):
# shortcut if both paths are files
return [paths]
dirnames = [None, None]
filelists = [None, None]
for i, path in enumerate(paths):
if glob.has_magic(path):
files = [os.path.split(f) for f in glob.glob(path)]
if not files:
log.error('Wildcard pattern %r did not match any files.', path)
sys.exit(2)
dirs, files = list(zip(*files))
if len(set(dirs)) > 1:
log.error('Wildcard pattern %r should match only one '
'directory.', path)
sys.exit(2)
dirnames[i] = set(dirs).pop()
filelists[i] = sorted(files)
elif os.path.isdir(path):
dirnames[i] = path
filelists[i] = [f for f in sorted(os.listdir(path))
if os.path.isfile(os.path.join(path, f))]
elif os.path.isfile(path):
dirnames[i] = os.path.dirname(path)
filelists[i] = [os.path.basename(path)]
else:
log.error(
'%r is not an existing file, directory, or wildcard '
'pattern; see `fitsdiff --help` for more usage help.', path)
sys.exit(2)
dirnames[i] = os.path.abspath(dirnames[i])
filematch = set(filelists[0]) & set(filelists[1])
for a, b in [(0, 1), (1, 0)]:
if len(filelists[a]) > len(filematch) and not os.path.isdir(paths[a]):
for extra in sorted(set(filelists[a]) - filematch):
log.warning('%r has no match in %r', extra, dirnames[b])
return [(os.path.join(dirnames[0], f),
os.path.join(dirnames[1], f)) for f in filematch]
def main(args=None):
args = args or sys.argv[1:]
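    # Extra default arguments can be supplied via the FITSDIFF_SETTINGS
    # environment variable; they are prepended to the explicit command-line
    # arguments before parsing.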
if 'FITSDIFF_SETTINGS' in os.environ:
args = os.environ['FITSDIFF_SETTINGS'].split() + args
opts = handle_options(args)
if opts.rtol is None:
opts.rtol = 0.0
if opts.atol is None:
opts.atol = 0.0
if opts.exact_comparisons:
# override the options so that each is the most restrictive
opts.ignore_keywords = []
opts.ignore_comments = []
opts.ignore_fields = []
opts.rtol = 0.0
opts.atol = 0.0
opts.ignore_blanks = False
opts.ignore_blank_cards = False
if not opts.quiet:
setup_logging(opts.output_file)
files = match_files(opts.fits_files)
close_file = False
if opts.quiet:
out_file = None
elif opts.output_file:
out_file = open(opts.output_file, 'w')
close_file = True
else:
out_file = sys.stdout
identical = []
try:
for a, b in files:
# TODO: pass in any additional arguments here too
diff = fits.diff.FITSDiff(
a, b,
ignore_hdus=opts.ignore_hdus,
ignore_keywords=opts.ignore_keywords,
ignore_comments=opts.ignore_comments,
ignore_fields=opts.ignore_fields,
numdiffs=opts.numdiffs,
rtol=opts.rtol,
atol=opts.atol,
ignore_blanks=opts.ignore_blanks,
ignore_blank_cards=opts.ignore_blank_cards)
diff.report(fileobj=out_file)
identical.append(diff.identical)
return int(not all(identical))
finally:
if close_file:
out_file.close()
# Close the file if used for the logging output, and remove handlers to
# avoid having them multiple times for unit tests.
for handler in log.handlers:
if isinstance(handler, logging.FileHandler):
handler.close()
log.removeHandler(handler)
| bsd-3-clause | 7,991,085,859,582,539,000 | 36.294798 | 80 | 0.618878 | false |
takeshineshiro/horizon | openstack_dashboard/dashboards/admin/volumes/volume_types/forms.py | 7 | 8730 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard.api import cinder
class CreateVolumeTypeEncryption(forms.SelfHandlingForm):
name = forms.CharField(label=_("Name"), required=False,
widget=forms.TextInput(attrs={'readonly':
'readonly'}))
provider = forms.CharField(max_length=255, label=_("Provider"))
control_location = forms.ChoiceField(label=_("Control Location"),
choices=(('front-end',
_('front-end')),
('back-end',
_('back-end')))
)
cipher = forms.CharField(label=_("Cipher"), required=False)
key_size = forms.IntegerField(label=_("Key Size (bits)"),
required=False,
min_value=1)
volume_type_id = forms.CharField(widget=forms.HiddenInput())
def handle(self, request, data):
try:
# Set Cipher to None if empty
            if data['cipher'] == u'':
data['cipher'] = None
            # Create encryption for the volume type
volume_type = cinder.\
volume_encryption_type_create(request,
data['volume_type_id'],
data)
messages.success(request, _('Successfully created encryption for '
'volume type: %s') % data['name'])
return volume_type
except Exception:
redirect = reverse("horizon:admin:volumes:index")
exceptions.handle(request,
_('Unable to create encrypted volume type.'),
redirect=redirect)
class ManageQosSpecAssociation(forms.SelfHandlingForm):
qos_spec_choice = forms.ChoiceField(
label=_("QoS Spec to be associated"),
help_text=_("Choose associated QoS Spec."))
def __init__(self, request, *args, **kwargs):
super(ManageQosSpecAssociation, self).__init__(request,
*args,
**kwargs)
qos_spec_field = self.fields['qos_spec_choice']
qos_spec_field.choices = \
self.populate_qos_spec_choices()
def populate_qos_spec_choices(self):
# populate qos spec list box
qos_specs = self.initial["qos_specs"]
current_qos_spec = self.initial["cur_qos_spec_id"]
qos_spec_list = [(qos_spec.id, qos_spec.name)
for qos_spec in qos_specs
if qos_spec.id != current_qos_spec]
if current_qos_spec:
# used to remove the current spec
qos_spec_list.insert(0, ("-1", _("None (removes spec)")))
if qos_spec_list:
qos_spec_list.insert(0, ("", _("Select a new QoS spec")))
else:
qos_spec_list.insert(0, ("", _("No new QoS spec available")))
return qos_spec_list
def handle(self, request, data):
vol_type_id = self.initial['type_id']
new_qos_spec_id = data['qos_spec_choice']
# Update QOS Spec association information
try:
# NOTE - volume types can only be associated with
# ONE QOS Spec at a time
# first we need to un-associate the current QOS Spec, if it exists
cur_qos_spec_id = self.initial['cur_qos_spec_id']
if cur_qos_spec_id:
qos_spec = cinder.qos_spec_get(request,
cur_qos_spec_id)
cinder.qos_spec_disassociate(request,
qos_spec,
vol_type_id)
# now associate with new QOS Spec, if user wants one associated
if new_qos_spec_id != '-1':
qos_spec = cinder.qos_spec_get(request,
new_qos_spec_id)
cinder.qos_spec_associate(request,
qos_spec,
vol_type_id)
messages.success(request,
_('Successfully updated QoS Spec association.'))
return True
except Exception:
redirect = reverse("horizon:admin:volumes:index")
exceptions.handle(request,
_('Error updating QoS Spec association.'),
redirect=redirect)
class EditQosSpecConsumer(forms.SelfHandlingForm):
consumer_choice = forms.ChoiceField(
label=_("QoS Spec Consumer"),
choices=cinder.CONSUMER_CHOICES,
help_text=_("Choose consumer for this QoS Spec."))
def __init__(self, request, *args, **kwargs):
super(EditQosSpecConsumer, self).__init__(request, *args, **kwargs)
consumer_field = self.fields['consumer_choice']
qos_spec = self.initial["qos_spec"]
consumer_field.initial = qos_spec.consumer
def clean_consumer_choice(self):
# ensure that new consumer isn't the same as current consumer
qos_spec = self.initial['qos_spec']
cleaned_new_consumer = self.cleaned_data.get('consumer_choice')
old_consumer = qos_spec.consumer
if cleaned_new_consumer == old_consumer:
raise forms.ValidationError(
_('QoS Spec consumer value must be different than '
'the current consumer value.'))
return cleaned_new_consumer
def handle(self, request, data):
qos_spec_id = self.initial['qos_spec_id']
new_consumer = data['consumer_choice']
# Update QOS Spec consumer information
try:
cinder.qos_spec_set_keys(request,
qos_spec_id,
{'consumer': new_consumer})
messages.success(request,
_('Successfully modified QoS Spec consumer.'))
return True
except Exception:
redirect = reverse("horizon:admin:volumes:index")
exceptions.handle(request, _('Error editing QoS Spec consumer.'),
redirect=redirect)
class EditVolumeType(forms.SelfHandlingForm):
name = forms.CharField(max_length=255,
label=_("Name"))
description = forms.CharField(max_length=255,
widget=forms.Textarea(attrs={'rows': 4}),
label=_("Description"),
required=False)
def clean_name(self):
cleaned_name = self.cleaned_data['name']
if len(cleaned_name.strip()) == 0:
msg = _('New name cannot be empty.')
self._errors['name'] = self.error_class([msg])
return cleaned_name
def handle(self, request, data):
volume_type_id = self.initial['id']
try:
cinder.volume_type_update(request,
volume_type_id,
data['name'],
data['description'])
message = _('Successfully updated volume type.')
messages.success(request, message)
return True
except Exception as ex:
redirect = reverse("horizon:admin:volumes:index")
if ex.code == 409:
error_message = _('New name conflicts with another '
'volume type.')
else:
error_message = _('Unable to update volume type.')
exceptions.handle(request, error_message,
redirect=redirect)
| apache-2.0 | -3,924,998,294,218,275,300 | 41.378641 | 78 | 0.519015 | false |
tswast/google-cloud-python | container/google/cloud/container_v1beta1/__init__.py | 2 | 1033 | # -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from google.cloud.container_v1beta1 import types
from google.cloud.container_v1beta1.gapic import cluster_manager_client
from google.cloud.container_v1beta1.gapic import enums
class ClusterManagerClient(cluster_manager_client.ClusterManagerClient):
__doc__ = cluster_manager_client.ClusterManagerClient.__doc__
enums = enums
__all__ = ("enums", "types", "ClusterManagerClient")
| apache-2.0 | 4,824,280,240,045,201,000 | 33.433333 | 74 | 0.758955 | false |
seibert-media/Highton | highton/models/party.py | 1 | 3481 | from highton.models import HightonModel
from highton.highton_constants import HightonConstants
from highton import fields
class Party(
HightonModel,
):
"""
:ivar id: fields.IntegerField(name=HightonConstants.ID)
:ivar author_id: fields.IntegerField(name=HightonConstants.AUTHOR_ID)
:ivar background: fields.StringField(name=HightonConstants.BACKGROUND)
:ivar company_id: fields.IntegerField(name=HightonConstants.COMPANY_ID)
:ivar created_at: fields.DatetimeField(name=HightonConstants.CREATED_AT)
:ivar first_name: fields.StringField(name=HightonConstants.FIRST_NAME)
:ivar name: fields.StringField(name=HightonConstants.NAME)
:ivar group_id: fields.IntegerField(name=HightonConstants.GROUP_ID)
:ivar last_name: fields.StringField(name=HightonConstants.LAST_NAME)
:ivar owner_id: fields.IntegerField(name=HightonConstants.OWNER_ID)
:ivar title: fields.StringField(name=HightonConstants.TITLE)
:ivar updated_at: fields.DatetimeField(name=HightonConstants.UPDATED_AT)
:ivar visible_to: fields.StringField(name=HightonConstants.VISIBLE_TO)
:ivar company_name: fields.StringField(name=HightonConstants.COMPANY_NAME)
:ivar linkedin_url: fields.StringField(name=HightonConstants.LINKEDIN_URL)
:ivar avatar_url: fields.StringField(name=HightonConstants.AVATAR_URL)
:ivar type: fields.StringField(name=HightonConstants.TYPE)
:ivar tags: fields.ListField(name=HightonConstants.TAGS, init_class=Tag)
:ivar contact_data: fields.ObjectField(name=HightonConstants.CONTACT_DATA, init_class=ContactData)
:ivar subject_datas: fields.ListField(name=HightonConstants.SUBJECT_DATAS, init_class=SubjectData)
"""
ENDPOINT = HightonConstants.PARTIES
TAG_NAME = HightonConstants.PARTY
def __init__(self, **kwargs):
from highton.models import (
Tag,
ContactData,
SubjectData,
)
self.author_id = fields.IntegerField(name=HightonConstants.AUTHOR_ID)
self.background = fields.StringField(name=HightonConstants.BACKGROUND)
self.company_id = fields.IntegerField(name=HightonConstants.COMPANY_ID)
self.created_at = fields.DatetimeField(name=HightonConstants.CREATED_AT)
self.first_name = fields.StringField(name=HightonConstants.FIRST_NAME)
self.name = fields.StringField(name=HightonConstants.NAME)
self.group_id = fields.IntegerField(name=HightonConstants.GROUP_ID)
self.last_name = fields.StringField(name=HightonConstants.LAST_NAME)
self.owner_id = fields.IntegerField(name=HightonConstants.OWNER_ID)
self.title = fields.StringField(name=HightonConstants.TITLE)
self.updated_at = fields.DatetimeField(name=HightonConstants.UPDATED_AT)
self.visible_to = fields.StringField(name=HightonConstants.VISIBLE_TO)
self.company_name = fields.StringField(name=HightonConstants.COMPANY_NAME)
self.linkedin_url = fields.StringField(name=HightonConstants.LINKEDIN_URL)
self.avatar_url = fields.StringField(name=HightonConstants.AVATAR_URL)
self.type = fields.StringField(name=HightonConstants.TYPE)
self.tags = fields.ListField(name=HightonConstants.TAGS, init_class=Tag)
self.contact_data = fields.ObjectField(name=HightonConstants.CONTACT_DATA, init_class=ContactData)
self.subject_datas = fields.ListField(name=HightonConstants.SUBJECT_DATAS, init_class=SubjectData)
super().__init__(**kwargs)
| apache-2.0 | -3,361,866,450,321,599,500 | 55.145161 | 106 | 0.74921 | false |
mrev11/ccc3 | jt/jtpython/test-menu.py | 2 | 3806 | #! /usr/bin/env python
# _*_ coding: latin-1 _*_
from jtlib import *
from jtlib.jtalert import jtalert
def mkdialog():
dlg=jtdialog.new(6,20,20,80)
dlg.caption('Menü teszt')
dlg.layout="vbox"
dlg.layout="fix"
    ### First pulldown (file)
menu=dlg.add(jtmenu.new())
menu.text="File"
menu.icon="images/middle.gif"
menu.tooltip="Egy szokványos fájl menü"
menu.mnemonic="F"
mi=menu.additem(jtmenuitem.new())
mi.text="New"
mi=menu.additem(jtmenuitem.new())
mi.text="Open"
mi=menu.additem(jtmenuitem.new())
mi.text="Save"
mi.accelerator="ctrl S"
mi=menu.additem(jtmenuitem.new())
mi.text="Save as"
menu.additem(jtmenusep.new())
mi=menu.additem(jtmenuitem.new())
mi.text="Quit"
mi.icon="images/exit.gif"
mi.accelerator="ctrl Q"
mi.actionblock=lambda dlg:dlg.close()
mi.tooltip="Kilép a programból"
    ### Second pulldown (radio, multi-level)
menu=dlg.add(menu1())
menu.text="Összetett menü"
menu.tooltip="Egy összetett menü"
menu.mnemonic="M"
m1=menu.additem(menu1())
m1.text="Belsô menü"
m1.mnemonic="B"
menu.additem(jtmenusep.new())
mc=menu.additem(jtmenucheck.new())
mc.name="csekk"
mc.text="Nicsak, egy csekkbox!"
menu.additem(jtmenusep.new())
mr=menu.additem(jtmenuradio.new())
mr.name="mr1"
mr.text="Ez az egyik lehetôség"
mr=menu.additem(jtmenuradio.new())
mr.text="Vagy próbáld ezt"
mr.name="mr2"
mr=menu.additem(jtmenuradio.new())
mr.text="Vagy talán inkább ezt"
mr.name="mr3"
menu.additem(jtmenusep.new())
mr=menu.additem(jtmenuradio.new())
mr.name="mr4"
mr.text="Ez az egyik lehetôség"
mr=menu.additem(jtmenuradio.new())
mr.text="Vagy próbáld ezt"
mr.name="mr5"
mr=menu.additem(jtmenuradio.new())
mr.text="Vagy talán inkább ezt"
mr.name="mr6"
mr=menu.additem(jtmenuradio.new())
mr.text="De ez a legjobb!"
mr.name="mr7"
mr.tooltip="Non plus ultra"
    ### Third pulldown (check)
menu=dlg.add(jtmenu.new())
menu.text="Checkbox menü"
menu.tooltip="Checkbox menü demó"
menu.mnemonic="C"
mc=menu.additem(jtmenucheck.new())
mc.text="Elsô checkmenü"
mc.name="mc1"
mc.varput(1)
mc=menu.additem(jtmenucheck.new())
mc.text="Második checkmenü"
mc.name="mc2"
mc.varput(1)
mc=menu.additem(jtmenucheck.new())
mc.text="Harmadik checkmenü"
mc.name="mc3"
    ### Fourth pulldown (help)
menu=dlg.add(jtmenusep.new())
menu=dlg.add(jtmenu.new())
menu.text="Help"
menu.tooltip="Help menü demó"
menu.icon="icons/16/help.png"
menu.mnemonic="H"
mi=menu.additem(jtmenuitem.new())
mi.text="Getting started"
mi.accelerator="F1"
mi.actionblock=lambda dlg:jtalert("Getting started")
mi=menu.additem(jtmenuitem.new())
mi.text="About"
mi.actionblock=lambda dlg:jtalert("Menü demó (C) ComFirm 2003.")
dlg.varinst("menu")
return dlg
def menu1():
menu=jtmenu.new()
mi=menu.additem(jtmenuitem.new())
mi.text="Elsô menüpont"
mi.mnemonic="E"
mi.actionblock=mkblock(mi.text)
mi=menu.additem(jtmenuitem.new())
mi.text="Második menüpont"
mi.mnemonic="M"
mi.actionblock=mkblock(mi.text)
mi=menu.additem(jtmenuitem.new())
mi.text="Harmadik menüpont"
mi.mnemonic="H"
mi.actionblock=mkblock(mi.text)
mi=menu.additem(jtmenuitem.new())
mi.text="Negyedik menüpont"
mi.mnemonic="N"
mi.actionblock=mkblock(mi.text)
return menu
def mkblock(text):
return lambda x:jtalert(text)
def main():
dlg=mkdialog()
dlg.show()
while dlg.getmessage():
pass
main()
| lgpl-2.1 | -1,531,147,518,782,421,200 | 20.033149 | 68 | 0.623226 | false |
jhayworth/config | .emacs.d/elpy/rpc-venv/lib/python2.7/site-packages/setuptools/extern/__init__.py | 10 | 2514 | import sys
class VendorImporter:
"""
A PEP 302 meta path importer for finding optionally-vendored
or otherwise naturally-installed packages from root_name.
"""
def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
self.root_name = root_name
self.vendored_names = set(vendored_names)
self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')
@property
def search_path(self):
"""
Search first the vendor package then as a natural package.
"""
yield self.vendor_pkg + '.'
yield ''
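        # For example, with root_name 'setuptools.extern' and target 'six', the
        # candidates tried are 'setuptools._vendor.six' and then plain 'six'.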
def find_module(self, fullname, path=None):
"""
Return self when fullname starts with root_name and the
target module is one vendored through this importer.
"""
root, base, target = fullname.partition(self.root_name + '.')
if root:
return
if not any(map(target.startswith, self.vendored_names)):
return
return self
def load_module(self, fullname):
"""
Iterate over the search path to locate and load fullname.
"""
root, base, target = fullname.partition(self.root_name + '.')
for prefix in self.search_path:
try:
extant = prefix + target
__import__(extant)
mod = sys.modules[extant]
sys.modules[fullname] = mod
# mysterious hack:
# Remove the reference to the extant package/module
# on later Python versions to cause relative imports
# in the vendor package to resolve the same modules
# as those going through this importer.
if sys.version_info >= (3, ):
del sys.modules[extant]
return mod
except ImportError:
pass
else:
raise ImportError(
"The '{target}' package is required; "
"normally this is bundled with this package so if you get "
"this warning, consult the packager of your "
"distribution.".format(**locals())
)
def install(self):
"""
Install this importer into sys.meta_path if not already present.
"""
if self not in sys.meta_path:
sys.meta_path.append(self)
names = 'six', 'packaging', 'pyparsing', 'ordered_set',
VendorImporter(__name__, names, 'setuptools._vendor').install()
| gpl-3.0 | -3,533,123,111,926,965,000 | 33.438356 | 78 | 0.558473 | false |
Grumbel/dirtool | tests/test_util.py | 1 | 1341 | # dirtool.py - diff tool for directories
# Copyright (C) 2017 Ingo Ruhnke <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
from dirtools.util import numeric_sorted
class UtilTestCase(unittest.TestCase):
def test_numeric_sorted(self):
tests = [
(['2', '22', '10', '1'],
['1', '2', '10', '22']),
(['a9', 'a999', 'a99', 'a9999'],
['a9', 'a99', 'a999', 'a9999']),
(['aaa', '999'],
['999', 'aaa']),
(['a9a', 'a9z', 'a9d', 'a9m3', 'a9m5', 'a9m1'],
['a9a', 'a9d', 'a9m1', 'a9m3', 'a9m5', 'a9z']),
]
for lhs, rhs in tests:
self.assertEqual(numeric_sorted(lhs), rhs)
# EOF #
| gpl-3.0 | -4,314,944,913,284,218,400 | 29.477273 | 71 | 0.60701 | false |
adalke/rdkit | rdkit/Chem/MolDb/Loader_sa.py | 1 | 6074 | # $Id$
#
# Copyright (C) 2007-2009 Greg Landrum
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
import sqlalchemy
from rdkit import Chem
from rdkit.Chem import AllChem
from rdkit.Chem import Lipinski,Descriptors,Crippen
from rdkit.Dbase.DbConnection import DbConnect
from rdkit.Dbase import DbModule
import os
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Table,Column,MetaData
from sqlalchemy import Integer,Text,String,ForeignKey,Binary,DateTime,Float
from sqlalchemy.orm import relation,mapper,sessionmaker,backref
from sqlalchemy import create_engine
decBase = declarative_base()
class Compound(decBase):
__tablename__='molecules'
guid=Column(Integer,primary_key=True)
molpkl=Column(Binary)
def RegisterSchema(dbUrl,echo=False):
engine = create_engine(dbUrl,echo=echo)
decBase.metadata.create_all(engine)
maker = sessionmaker(bind=engine)
return maker
ConnectToSchema=RegisterSchema
def _ConnectToSchema(dbUrl,echo=False):
engine = create_engine(dbUrl,echo=echo)
decBase.metadata.create_all(engine)
maker = sessionmaker(bind=engine)
return maker
#set up the logger:
import rdkit.RDLogger as logging
logger = logging.logger()
logger.setLevel(logging.INFO)
def ProcessMol(session,mol,globalProps,nDone,nameProp='_Name',nameCol='compound_id',
redraw=False,keepHs=False,
skipProps=False,addComputedProps=False,
skipSmiles=False):
if not mol:
raise ValueError('no molecule')
if keepHs:
Chem.SanitizeMol(mol)
try:
nm = mol.GetProp(nameProp)
except KeyError:
nm = None
if not nm:
nm = 'Mol_%d'%nDone
cmpd = Compound()
session.add(cmpd)
if redraw:
    AllChem.Compute2DCoords(mol)
if not skipSmiles:
cmpd.smiles=Chem.MolToSmiles(mol,True)
cmpd.molpkl=mol.ToBinary()
setattr(cmpd,nameCol,nm)
if not skipProps:
if addComputedProps:
cmpd.DonorCount=Lipinski.NumHDonors(mol)
cmpd.AcceptorCount=Lipinski.NumHAcceptors(mol)
cmpd.RotatableBondCount=Lipinski.NumRotatableBonds(mol)
cmpd.AMW=Descriptors.MolWt(mol)
cmpd.MolLogP=Crippen.MolLogP(mol)
pns = list(mol.GetPropNames())
for pi,pn in enumerate(pns):
if pn.lower()==nameCol.lower(): continue
pv = mol.GetProp(pn).strip()
if pn in globalProps:
setattr(cmpd,pn.lower(),pv)
return cmpd
def LoadDb(suppl,dbName,nameProp='_Name',nameCol='compound_id',silent=False,
redraw=False,errorsTo=None,keepHs=False,defaultVal='N/A',skipProps=False,
regName='molecules',skipSmiles=False,maxRowsCached=-1,
uniqNames=False,addComputedProps=False,lazySupplier=False,
numForPropScan=10,startAnew=True):
if not lazySupplier:
nMols = len(suppl)
else:
nMols=-1
if not silent:
logger.info("Generating molecular database in file %s"%dbName)
if not lazySupplier:
logger.info(" Processing %d molecules"%nMols)
globalProps = {}
if startAnew:
if os.path.exists(dbName):
for i in range(5):
try:
os.unlink(dbName)
break
        except OSError:
import time
time.sleep(2)
if os.path.exists(dbName):
raise IOError('could not delete old database %s'%dbName)
sIter=iter(suppl)
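  # Columns are attached to the Compound class dynamically here, before
  # RegisterSchema() creates the table below: the name column, optionally the
  # SMILES column, and one column per property found on the first few molecules.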
setattr(Compound,nameCol.lower(),Column(nameCol.lower(),String,default=defaultVal,unique=uniqNames))
if not skipSmiles:
Compound.smiles = Column(Text,unique=True)
if not skipProps:
while numForPropScan>0:
try:
m = next(sIter)
except StopIteration:
numForPropScan=0
break
if not m: continue
for pn in m.GetPropNames():
if pn.lower()==nameCol.lower(): continue
if pn not in globalProps:
globalProps[pn]=1
setattr(Compound,pn.lower(),Column(pn.lower(),String,default=defaultVal))
numForPropScan-=1
if addComputedProps:
Compound.DonorCount=Column(Integer)
Compound.AcceptorCount=Column(Integer)
Compound.RotatableBondCount=Column(Integer)
Compound.AMW=Column(Float)
Compound.MolLogP=Column(Float)
session=RegisterSchema('sqlite:///%s'%(dbName))()
nDone = 0
cache=[]
for m in suppl:
nDone +=1
if not m:
if errorsTo:
if hasattr(suppl,'GetItemText'):
d = suppl.GetItemText(nDone-1)
errorsTo.write(d)
else:
logger.warning('full error file support not complete')
continue
cmpd=ProcessMol(session,m,globalProps,nDone,nameProp=nameProp,
nameCol=nameCol,redraw=redraw,
keepHs=keepHs,skipProps=skipProps,
addComputedProps=addComputedProps,skipSmiles=skipSmiles)
if cmpd is not None:
cache.append(cmpd)
if not silent and not nDone%100:
logger.info(' done %d'%nDone)
try:
session.commit()
except Exception:
session.rollback()
for cmpd in cache:
try:
session.add(cmpd)
session.commit()
except Exception:
session.rollback()
except BaseException:
# Rollback even with KeyboardInterrupt
session.rollback()
raise
cache=[]
try:
session.commit()
except BaseException as exc:
import traceback
traceback.print_exc()
session.rollback()
for cmpd in cache:
try:
session.add(cmpd)
session.commit()
except Exception:
session.rollback()
except BaseException:
session.rollback()
raise
if not isinstance(exc, Exception):
# Re-raise on KeyboardInterrupt, SystemExit, etc.
raise exc
if __name__=='__main__':
import sys
sdf =Chem.SDMolSupplier(sys.argv[1])
db =sys.argv[2]
LoadDb(sdf,db,addComputedProps=False)
session = RegisterSchema('sqlite:///%s'%(db))()
print('>>>>', len(session.query(Compound).all()))
| bsd-3-clause | -278,411,356,040,623,140 | 28.062201 | 102 | 0.666118 | false |
aquaya/pipeline | application/models.py | 1 | 12596 | ''' mongoengine models
'''
from mongoengine import *
class User(Document):
''' some are admins some are not
'''
admin_rights = BooleanField(required=True)
api_id = StringField()
api_key = StringField()
email = EmailField(required=True, unique=True, max_length=254)
email_confirmation_code = StringField(required=True)
email_confirmed = BooleanField(required=True)
forgot_password_code = StringField()
last_login_time = DateTimeField(required=True)
name = StringField()
organizations = ListField(ReferenceField('Organization'))
password_hash = StringField(required=True)
registration_time = DateTimeField(required=True)
uploads = ListField(ReferenceField('Upload'))
verified = BooleanField(required=True)
class Organization(Document):
''' people join orgs
'''
description = StringField(default='')
location = StringField()
# url-safe version of the name
label = StringField(unique=True, required=True)
name = StringField(unique=True, required=True)
projects = ListField(ReferenceField('Project'))
class Project(Document):
''' projects belong to orgs
'''
creation_time = DateTimeField(required=True)
description = StringField(default='')
# track order of schema keys
# deprecated - now tracked by ordered_schema
#headers = ListField(StringField(unique_with='name'))
# url-safe version of the name
label = StringField(required=True, unique_with='organization')
name = StringField(required=True, unique_with='organization')
organization = ReferenceField(Organization)
# maps headers to data types
# deprecated
#schema = DictField()
# new version of schemas
ordered_schema = ListField(ReferenceField('Header'))
default_sort_header = ReferenceField('Header')
default_sort_descending = BooleanField()
update_time = DateTimeField()
uploads = ListField(ReferenceField('Upload'))
class Header(Document):
''' excel header, coupled to schemas
'''
data_type = StringField()
display = BooleanField()
name = StringField()
project = ReferenceField(Project)
label = StringField()
class Upload(Document):
''' raw files; connected to projects
'''
description = StringField(default='')
extension = StringField()
filename = StringField()
# the schema
headers = ListField(StringField(unique_with='name'))
label = StringField(unique_with='project')
name = StringField(unique_with='project')
# worker messages generated during background processing
# this message list should be cleared after they're flashed
# {'status': 'error', 'message': 'bad headers'}
worker_messages = ListField(DictField())
project = ReferenceField(Project)
s3_key = StringField()
upload_time = DateTimeField()
uploaded_by = ReferenceField(User)
class Entry(Document):
''' rows in an uploaded file
'''
# if the entry originates from a data connection
connection_log = ReferenceField('ConnectionLogEntry')
project = ReferenceField(Project)
# for duplicate-checking
unique = BooleanField()
# if it came from an upload
upload = ReferenceField(Upload)
# md5 of the values in the entry to check for uniqueness
value_hash = StringField()
# the actual entries, keyed by header names
values = DictField()
# available for analysis
visible = BooleanField(default=True)
# whether the entry was ever edited
# awkward "was_never" due to past experiences with 'default=False' bugs
was_never_edited = BooleanField(default=True)
class Graph(Document):
''' user-defined graph for a project's data
project or upload data can be plotted with this definition
'''
# apply any of the included filters or all of them
apply_any_filters = BooleanField(default=True)
creation_time = DateTimeField()
creator = ReferenceField(User)
description = StringField(default='')
# any filters attached to this graph
filters = ListField(ReferenceField('Filter'))
# 'line', 'scatter', 'pie'
graph_type = StringField(default='line')
# slug
label = StringField()
project = ReferenceField(Project)
# name of this graph config
name = StringField(required=True, unique_with='project')
# header to be used as x-axis for line graphs
xaxis = ReferenceField('Header')
# header to be used as y-axis for line graphs
yaxis = ReferenceField('Header')
# header to be used as the categorizer for pie charts
pie_header = ReferenceField('Header')
class Statistic(Document):
''' user-defined methods for analyzing data
'''
# apply any of the included filters or all of them
apply_any_filters = BooleanField(default=True)
creation_time = DateTimeField()
creator = ReferenceField(User)
description = StringField(default='')
# used to winnow the data
filters = ListField(ReferenceField('Filter'))
# we analyze the values based on this header
header = ReferenceField('Header')
# a label for urls
label = StringField(required=True)
name = StringField(required=True, unique_with='project')
project = ReferenceField(Project)
# average, count, uniques
statistic_type = StringField()
# computing tables of stats
pivot = BooleanField()
pivot_header = ReferenceField('Header')
pivot_values = ListField(StringField())
class Filter(Document):
''' applied to statistics and graphs
'''
# the value to which things are compared
# using a dictionary to allow multiple types. keyed by 'value'
compare_to = DictField()
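    # e.g. {'value': 10} or {'value': 'some text'} (illustrative values only)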
creation_time = DateTimeField()
creator = ReferenceField(User)
description = StringField(default='')
# a label for urls
label = StringField(required=True)
name = StringField(required=True, unique_with='project')
project = ReferenceField(Project)
# string representation of the comparison
comparison = StringField()
# we compare the value at this Header
header = ReferenceField('Header')
class Report(Document):
''' a collection of stats, graphs, and notes
'''
# referenced graphs or stats
components = ListField(GenericReferenceField())
creation_time = DateTimeField()
creator = ReferenceField(User)
description = StringField(default='')
# a label for urls
label = StringField(required=True)
name = StringField(required=True, unique_with='project')
project = ReferenceField(Project)
# makes up random public url for wkhtmltopdf to access
public_label = StringField()
class Rendering(Document):
''' pdfed report
'''
# referenced graphs, referenced stats, or notes
# Dict is {'type': 'note/statistic/graph', 'value': 'content/name'}
components = ListField(DictField())
creation_time = DateTimeField()
filename = StringField()
# a secondary ID with a bit more entropy
label = StringField()
report = ReferenceField(Report)
# storage on s3
s3_key = StringField()
class Schedule(Document):
''' a configuration for sending reports
tracks what reports to send
and when to send it
'''
description = StringField(default='')
# list of {'name': 'me', 'email': '[email protected]'} dictionaries
email_recipients = ListField(DictField())
email_body = StringField()
email_subject = StringField()
''' scheduling info; contents vary based on value of the key 'send_every'
need to be able to handle these scenarios which correspond to (type):
- every day at 5pm (every_time)
- every Friday at 4:30pm (every_day)
- first Monday of month at 9am (every_first_day_of_month)
- 1st of each month (every_date_of_month)
{
'type': 'every_time/every_day/etc..see above'
, 'at': '09:00 AM'
, 'on_day': 'Monday'
}
'''
interval = DictField()
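    # e.g. {'type': 'every_day', 'at': '04:30 PM', 'on_day': 'Friday'}
    # would fire every Friday at 4:30pm (illustrative values only)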
# a label for urls
label = StringField(required=True)
last_run_time = DateTimeField()
name = StringField(required=True, unique_with='project')
# celery task id of the next task that should fire
# allows old tasks to be skipped when the schedule changes
next_task_id = StringField()
# when this schedule fires next
next_run_time = DateTimeField()
project = ReferenceField(Project)
# 'email' or 'sms'
message_type = StringField()
# list of {'name': 'me', 'phone_number': '+1234567890'} dictionaries
sms_recipients = ListField(DictField())
''' attachments
objects that are sent with this schedule
'''
# pdf reports
reports = ListField(ReferenceField(Report, reverse_delete_rule=PULL))
# whether to send all the project entries and apply any filters
send_project_data = BooleanField()
data_filters = ListField(ReferenceField(Filter, reverse_delete_rule=PULL))
apply_any_filters = BooleanField()
# statistics to include
statistics = ListField(ReferenceField(Statistic, reverse_delete_rule=PULL))
class Message(Document):
''' schedules create messages
'''
creation_time = DateTimeField()
message_type = StringField()
# absolute path to the locally saved project data
project_data_path = StringField()
recipients = ListField(DictField())
renderings = ListField(ReferenceField(Rendering))
schedule = ReferenceField(Schedule)
sent_time = DateTimeField()
# computed stat results in text form
statistic_results = ListField(StringField())
class Connection(Document):
''' automated connection to another data service
'''
description = StringField(default='')
schedule = ReferenceField('RepeatingTask')
# a label for urls
label = StringField(required=True)
name = StringField(required=True, unique_with='project')
project = ReferenceField(Project)
# only 'commcare' is allowed
service_type = StringField()
meta = {'allow_inheritance': True}
class ConnectionLogEntry(Document):
''' a record of automatically accessing a Connection
'''
initiation_time = DateTimeField()
completion_time = DateTimeField()
http_status_code = StringField()
# the number of records were captured this time
project = ReferenceField(Project)
records_received = IntField()
service_type = StringField()
class CommCareConnection(Connection):
''' connection to the commcare api
'''
''' credentials attr contains 'username' and 'password' keys
should be a throwaway user for api purposes only
have to store password in plaintext here, unfortunately
'''
credentials = DictField()
# commcare domain setting
domain = StringField()
# the form we are targeting
export_tag = StringField()
# whether or not to ask for submissions by the admin user
include_admin = BooleanField()
# if False, do not save fields that are not strictly on the form itself
include_metadata = BooleanField()
# used in next request to only get updates
latest_export_token = StringField()
class RepeatingTask(Document):
''' handles information related to any scheduled events
'''
''' interval contains scheduling info
contents vary based on value of the key 'send_every'
need to be able to handle these scenarios which correspond to (type):
- every day at 5pm (every_time)
- every Friday at 4:30pm (every_day)
- first Monday of month at 9am (every_first_day_of_month)
- 1st of each month (every_date_of_month)
{
'type': 'every_time/every_day/etc..see above'
, 'at': '09:00 AM'
, 'on_day': 'Monday'
}
'''
interval = DictField()
last_run_time = DateTimeField()
''' when a job fires, the 'next_task_id' should be checked first
if the firing job's id doesn't match, the interval was updated
that means the firing job is obsolete and should be ignored
'''
next_task_id = StringField()
# when this interval fires next
next_run_time = DateTimeField()
class Comment(Document):
''' tracking edits to entries and general discussion
'''
body = StringField(max_length=1000)
creation_time = DateTimeField()
# if it's editable, it's also deletable
editable = BooleanField(default=True)
# tag the entry if the comment is related to an entry edit
entry = ReferenceField(Entry)
owner = ReferenceField(User)
# use owner_name if User was deleted
owner_name = StringField()
project = ReferenceField(Project)
# document version
version = IntField(default=1)
| mit | -8,808,990,847,646,030,000 | 33.043243 | 79 | 0.681724 | false |
TritonNetworking/themis_tritonsort | src/scripts/themis/metaprograms/utils/utils.py | 2 | 8889 | import os, sys, subprocess, shlex, json, re
from LogLineDescription import load_descriptions_from_file
LOGGREP_PATH = os.path.abspath(os.path.join(
os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, os.pardir,
"loggrep", "loggrep"))
def match_queries_to_descriptions(queries, descriptions):
"""
When given a list of StatQuery objects and a list of LogLineDescription
objects, attempts to match each query in the StatQuery list to a single
description in the LogLineDescription list.
"""
for query in queries:
field_names = query.field_names
matching_descriptions = filter(
lambda x: x.matches_query(query),
descriptions)
if len(matching_descriptions) == 0:
print >>sys.stderr, ("Couldn't match query %s to any description. "
"Valid descriptions are:\n%s" %
(query, '\n'.join(map(str, descriptions))))
return False
elif len(matching_descriptions) > 1:
print >>sys.stderr, (
"Multiple matching descriptions for query %s: %s" % (
query, '\n' + '\n'.join(map(str, matching_descriptions))))
return False
else:
query.description = matching_descriptions[0]
return True
def load_experiment_files(experiment_log_directory, skipped_phases=[]):
"""
Returns a collection of (job name, phase name, hostname, file list)
4-tuples. Each file in the file list is a triple consisting of the stats
filename, the params filename, and the LogLineDescription object
corresponding to the log line descriptions for that stats file.
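    Shape of one element (file names hypothetical): ('batch_7', 'phase_one',
    'node001', ('node001_stats.log', 'node001_params.log', [descriptions])).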
"""
experiment_log_directory = os.path.abspath(experiment_log_directory)
assert os.path.exists(experiment_log_directory), (
"Can't find directory %s" % (experiment_log_directory))
batch_name = re.match("batch_\d+", os.path.basename(
experiment_log_directory))
assert batch_name is not None, ("Expected log directory name to be of "
"the form `batch_#`")
batch_name = batch_name.group(0)
experiment_files = []
for (dir_path, child_directories, filenames) in \
os.walk(experiment_log_directory):
# Only consider leaf directories
if len(child_directories) > 0:
continue
# Treat the batch name as the job name. The directory should contain
# directories for each phase (including multiple runs of phase zero,
# which we'll treat separately)
job_name = batch_name
phase_name = os.path.basename(dir_path)
# Check to see if this phase should be skipped.
base_phase_name = phase_name
job_suffix_position = phase_name.find("_job")
if job_suffix_position != -1:
base_phase_name = phase_name[:job_suffix_position]
if base_phase_name in skipped_phases:
# We should skip this phase.
continue
stat_log_files = []
params_files = []
descriptor_files = []
for filename in filenames:
for (suffix, file_list) in [
("_stats.log", stat_log_files),
("_params.log", params_files),
("_stat_descriptors.log", descriptor_files)]:
suffix_index = filename.find(suffix)
if suffix_index != -1:
file_list.append(os.path.join(dir_path, filename))
if len(stat_log_files) != len(params_files) or \
len(params_files) != len(descriptor_files):
print "Some required files missing from %s (%d stat log files, " \
"%d params files, %d descriptor files). Skipping %s" % (
dir_path, len(stat_log_files), len(params_files),
len(descriptor_files), base_phase_name)
continue
stat_log_files.sort()
params_files.sort()
descriptor_files.sort()
descriptions = [load_descriptions_from_file(os.path.join(dir_path, f))
for f in descriptor_files]
collated_files = zip(stat_log_files, params_files, descriptions)
for file_set in collated_files:
stat_filename = os.path.basename(file_set[0])
hostname = stat_filename[:stat_filename.find("_stats.log")]
experiment_files.append((job_name, phase_name, hostname, file_set))
if len(experiment_files) == 0:
raise ValueError("Didn't find any experiment log files in '%s'. Are you sure "
"that it is a top-level experiment directory?" %
(experiment_log_directory))
return experiment_files
def run_queries_on_log_file(
queries, log_file, job_name, hostname, output_data, verbose):
"""
Run a list of queries on a given log file
"""
if verbose:
print "Running queries on '%s' ..." % (log_file)
regexPatterns = [query.get_regex().pattern.replace('\t', '\\t')
for query in queries]
loggrep_regex_args = ' '.join('"%s"' % (pattern)
for pattern in regexPatterns)
loggrep_command_str = "%(loggrep)s %(log_file)s %(args)s" % {
"loggrep" : LOGGREP_PATH,
"log_file" : log_file,
"args" : loggrep_regex_args}
loggrep_cmd = subprocess.Popen(shlex.split(loggrep_command_str),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = loggrep_cmd.communicate()
if loggrep_cmd.returncode != 0:
sys.exit("An error occurred while processing '%s': %s" % (
loggrep_command_str, stderr))
for line in stdout.split('\n'):
line = line.strip()
if len(line) == 0:
continue
line_chunks = line.split('\t')
query_number = int(line_chunks[0])
match_parts = line_chunks[1:]
matching_query = queries[query_number]
match_dict = matching_query.description.log_line_to_dict(match_parts)
# Add information about the job and host to the match dictionary
match_dict["hostname"] = hostname
match_dict["job_name"] = job_name
matching_query.match_processor_function(match_dict, output_data)
def process_queries_for_leaf_dir(queries, job_name, hostname,
file_list, output_data, verbose):
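    # Pair the queries with the stat descriptions for this host's log file, then
    # scan the stats file once and accumulate matches into output_data.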
stats_file, params_file, stat_descriptions = file_list
if not match_queries_to_descriptions(queries, stat_descriptions):
sys.exit("Matching queries to descriptions failed")
run_queries_on_log_file(queries, stats_file, job_name, hostname,
output_data, verbose)
def process_queries(
queries, experiment_log_directory, verbose, skipped_phases=[]):
"""
Run a set of queries across every log file in an experiment log directory.
"""
log_file_lists = load_experiment_files(
experiment_log_directory, skipped_phases)
if not os.path.exists(LOGGREP_PATH):
sys.exit("Can't find '%s'; be sure you've compiled loggrep and, "
"if you're doing an out-of-source build, that you've "
"symlinked loggrep into this location" % (LOGGREP_PATH))
output_data = {}
# Each 4-tuple produced by load_experiment_files represents a list of files
# in a leaf directory of the experiment log directory; process each of
# these in turn
for (job_name, phase_name, hostname, file_list) in log_file_lists:
process_queries_for_leaf_dir(
queries, job_name, hostname, file_list, output_data, verbose)
return output_data
def populate_nested_dictionary(dictionary, key_list, types=[]):
"""
For each key k_i in key_list, make sure that
dictionary[k_0][k_1][k_2]...[k_i] exists, creating objects of the
appropriate type in each sub-dictionary as needed.
If types is specified, then dictionary[key_list[i]] should point to an
object of type types[i] for all i
returns the element corresponding to dictionary[k_0][k_1]...[k_{n-1}] where
n is the size of key_list
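    Example: populate_nested_dictionary(d, ['a', 'b'], [dict, list]) leaves
    d == {'a': {'b': []}} and returns the innermost list object.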
"""
if len(key_list) > 0:
if key_list[0] not in dictionary:
if len(types) > 0:
dictionary[key_list[0]] = types[0]()
else:
dictionary[key_list[0]] = {}
return populate_nested_dictionary(dictionary[key_list[0]], key_list[1:],
types[1:])
else:
return dictionary
def job_sequence(experiment_directory):
return [os.path.basename(os.path.abspath(experiment_directory))]
def job_description(experiment_directory, job_name):
# Assume that the description files were copied to the experiment dir.
return experiment_directory
| bsd-3-clause | 5,822,477,329,420,167,000 | 35.731405 | 86 | 0.602205 | false |
bigswitch/neutron | neutron/api/rpc/handlers/dvr_rpc.py | 9 | 5496 | # Copyright 2014, Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import helpers as log_helpers
from oslo_log import log as logging
import oslo_messaging
from neutron.common import constants
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron import manager
LOG = logging.getLogger(__name__)
class DVRServerRpcApi(object):
"""Agent-side RPC (stub) for agent-to-plugin interaction.
This class implements the client side of an rpc interface. The server side
can be found below: DVRServerRpcCallback. For more information on changing
rpc interfaces, see doc/source/devref/rpc_api.rst.
"""
# 1.0 Initial Version
# 1.1 Support for passing 'fixed_ips' in get_subnet_for_dvr function.
    # Passing 'subnet' will be deprecated in the next release.
def __init__(self, topic):
target = oslo_messaging.Target(topic=topic, version='1.0',
namespace=constants.RPC_NAMESPACE_DVR)
self.client = n_rpc.get_client(target)
@log_helpers.log_method_call
def get_dvr_mac_address_by_host(self, context, host):
cctxt = self.client.prepare()
return cctxt.call(context, 'get_dvr_mac_address_by_host', host=host)
@log_helpers.log_method_call
def get_dvr_mac_address_list(self, context):
cctxt = self.client.prepare()
return cctxt.call(context, 'get_dvr_mac_address_list')
@log_helpers.log_method_call
def get_ports_on_host_by_subnet(self, context, host, subnet):
"""Get DVR serviced ports on given host and subnet."""
cctxt = self.client.prepare()
return cctxt.call(context, 'get_ports_on_host_by_subnet',
host=host, subnet=subnet)
@log_helpers.log_method_call
def get_subnet_for_dvr(self, context, subnet, fixed_ips):
cctxt = self.client.prepare()
return cctxt.call(
context, 'get_subnet_for_dvr', subnet=subnet, fixed_ips=fixed_ips)
class DVRServerRpcCallback(object):
"""Plugin-side RPC (implementation) for agent-to-plugin interaction.
This class implements the server side of an rpc interface. The client side
can be found above: DVRServerRpcApi. For more information on changing rpc
interfaces, see doc/source/devref/rpc_api.rst.
"""
# History
# 1.0 Initial version
    # 1.1 Support for passing the 'fixed_ips' in get_subnet_for_dvr.
# Passing subnet will be deprecated in the next release.
target = oslo_messaging.Target(version='1.1',
namespace=constants.RPC_NAMESPACE_DVR)
@property
def plugin(self):
if not getattr(self, '_plugin', None):
self._plugin = manager.NeutronManager.get_plugin()
return self._plugin
def get_dvr_mac_address_list(self, context):
return self.plugin.get_dvr_mac_address_list(context)
def get_dvr_mac_address_by_host(self, context, **kwargs):
host = kwargs.get('host')
LOG.debug("DVR Agent requests mac_address for host %s", host)
return self.plugin.get_dvr_mac_address_by_host(context, host)
def get_ports_on_host_by_subnet(self, context, **kwargs):
"""Get DVR serviced ports for given host and subnet."""
host = kwargs.get('host')
subnet = kwargs.get('subnet')
LOG.debug("DVR Agent requests list of VM ports on host %s", host)
return self.plugin.get_ports_on_host_by_subnet(context,
host, subnet)
def get_subnet_for_dvr(self, context, **kwargs):
fixed_ips = kwargs.get('fixed_ips')
subnet = kwargs.get('subnet')
return self.plugin.get_subnet_for_dvr(
context, subnet, fixed_ips=fixed_ips)
class DVRAgentRpcApiMixin(object):
"""Plugin-side RPC (stub) for plugin-to-agent interaction."""
DVR_RPC_VERSION = "1.0"
def _get_dvr_update_topic(self):
return topics.get_topic_name(self.topic,
topics.DVR,
topics.UPDATE)
def dvr_mac_address_update(self, context, dvr_macs):
"""Notify dvr mac address updates."""
if not dvr_macs:
return
cctxt = self.client.prepare(topic=self._get_dvr_update_topic(),
version=self.DVR_RPC_VERSION, fanout=True)
cctxt.cast(context, 'dvr_mac_address_update', dvr_macs=dvr_macs)
class DVRAgentRpcCallbackMixin(object):
"""Agent-side RPC (implementation) for plugin-to-agent interaction."""
def dvr_mac_address_update(self, context, **kwargs):
"""Callback for dvr_mac_addresses update.
:param dvr_macs: list of updated dvr_macs
"""
dvr_macs = kwargs.get('dvr_macs', [])
LOG.debug("dvr_macs updated on remote: %s", dvr_macs)
self.dvr_agent.dvr_mac_address_update(dvr_macs)
| apache-2.0 | 6,510,722,061,339,239,000 | 37.166667 | 79 | 0.650109 | false |
niltonlk/nest-simulator | pynest/nest/tests/test_quantal_stp_synapse.py | 8 | 3798 | # -*- coding: utf-8 -*-
#
# test_quantal_stp_synapse.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
# This script compares the two variants of the Tsodyks/Markram synapse in NEST.
import nest
import numpy
import unittest
@nest.ll_api.check_stack
class QuantalSTPSynapseTestCase(unittest.TestCase):
"""Compare quantal_stp_synapse with its deterministic equivalent."""
def test_QuantalSTPSynapse(self):
"""Compare quantal_stp_synapse with its deterministic equivalent"""
nest.ResetKernel()
nest.SetKernelStatus({"rng_seed": 1})
nest.set_verbosity(100)
n_syn = 12 # number of synapses in a connection
n_trials = 100 # number of measurement trials
# parameter set for facilitation
fac_params = {"U": 0.03, "u": 0.03,
"tau_fac": 500., "tau_rec": 200., "weight": 1.}
# Here we assign the parameter set to the synapse models
t1_params = fac_params # for tsodyks2_synapse
t2_params = t1_params.copy() # for furhmann_synapse
t1_params["synapse_model"] = "tsodyks2_synapse"
t2_params["n"] = n_syn
t2_params["weight"] = 1. / n_syn
t2_params["synapse_model"] = "quantal_stp_synapse"
source = nest.Create("spike_generator")
source.spike_times = [30., 60., 90., 120., 150., 180., 210., 240., 270., 300., 330., 360., 390., 900.]
parrot = nest.Create("parrot_neuron")
neuron = nest.Create("iaf_psc_exp", 2, params={"tau_syn_ex": 3., "tau_m": 70.})
# We must send spikes via parrot because devices cannot
# connect through plastic synapses
# See #478.
nest.Connect(source, parrot)
nest.Connect(parrot, neuron[:1], syn_spec=t1_params)
nest.Connect(parrot, neuron[1:], syn_spec=t2_params)
voltmeter = nest.Create("voltmeter", 2)
t_tot = 1500.
# the following is a dry run trial so that the synapse dynamics is
        # identical in all subsequent trials.
nest.Simulate(t_tot)
# Now we connect the voltmeters
nest.Connect(voltmeter[:1], neuron[:1])
nest.Connect(voltmeter[1:], neuron[1:])
for t in range(n_trials):
t_net = nest.GetKernelStatus("biological_time")
nest.SetStatus(source, {"origin": t_net})
nest.Simulate(t_tot)
nest.Simulate(.1) # flush the last voltmeter events from the queue
vm = numpy.array(voltmeter[1].events["V_m"])
vm_reference = numpy.array(voltmeter[0].events["V_m"])
assert(len(vm) % n_trials == 0)
n_steps = int(len(vm) / n_trials)
vm.shape = (n_trials, n_steps)
vm_reference.shape = (n_trials, n_steps)
vm_mean = numpy.mean(vm, axis=0)
vm_ref_mean = numpy.mean(vm_reference, axis=0)
error = numpy.sqrt((vm_ref_mean - vm_mean)**2)
self.assertLess(numpy.max(error), 4.0e-4)
def suite():
suite = unittest.makeSuite(QuantalSTPSynapseTestCase, "test")
return suite
def run():
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite())
if __name__ == "__main__":
run()
| gpl-2.0 | -6,447,948,262,742,387,000 | 32.026087 | 110 | 0.630595 | false |
wilvk/ansible | lib/ansible/plugins/action/group_by.py | 54 | 1818 | # Copyright 2012, Jeroen Hoekx <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.module_utils.six import string_types
class ActionModule(ActionBase):
''' Create inventory groups based on variables '''
# We need to be able to modify the inventory
TRANSFERS_FILES = False
def run(self, tmp=None, task_vars=None):
if task_vars is None:
task_vars = dict()
result = super(ActionModule, self).run(tmp, task_vars)
if 'key' not in self._task.args:
result['failed'] = True
result['msg'] = "the 'key' param is required when using group_by"
return result
group_name = self._task.args.get('key')
parent_groups = self._task.args.get('parents', ['all'])
if isinstance(parent_groups, string_types):
parent_groups = [parent_groups]
result['changed'] = False
result['add_group'] = group_name.replace(' ', '-')
result['parent_groups'] = [name.replace(' ', '-') for name in parent_groups]
return result
| gpl-3.0 | 1,685,535,593,504,862,000 | 36.102041 | 84 | 0.673267 | false |
Spark001/LabelImg | libs/labelDialog.py | 1 | 3616 | try:
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
except ImportError:
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from lib import newIcon, labelValidator, newAction, addActions
from functools import partial
BB = QDialogButtonBox
class LabelDialog(QDialog):
def __init__(self, text="Enter object label", parent=None, listItem=None):
super(LabelDialog, self).__init__(parent)
self.edit = QLineEdit()
self.edit.setText(text)
self.edit.setValidator(labelValidator())
self.edit.editingFinished.connect(self.postProcess)
layout = QVBoxLayout()
layout.addWidget(self.edit)
self.buttonBox = bb = BB(BB.Ok | BB.Cancel, Qt.Horizontal, self)
bb.button(BB.Ok).setIcon(newIcon('done'))
bb.button(BB.Cancel).setIcon(newIcon('undo'))
bb.accepted.connect(self.validate)
bb.rejected.connect(self.reject)
layout.addWidget(bb)
        self.listItem = listItem[:] if listItem is not None else None
if listItem is not None and len(listItem) > 0:
self.listWidget = QListWidget(self)
for item in listItem:
#qDebug(item)
self.listWidget.addItem(item)
self.listWidget.itemDoubleClicked.connect(self.listItemClick)
layout.addWidget(self.listWidget)
# add a shortcut to choose the first label
action = partial(newAction, self)
choose1 = action('@Choose label', self.choose,
' ', None, u'Choose label')
addActions(self.listWidget, (choose1,))
self.setLayout(layout)
def validate(self):
try:
if self.edit.text().trimmed():
self.accept()
except AttributeError:
# PyQt5: AttributeError: 'str' object has no attribute 'trimmed'
if self.edit.text().strip():
self.accept()
def postProcess(self):
try:
self.edit.setText(self.edit.text().trimmed())
except AttributeError:
# PyQt5: AttributeError: 'str' object has no attribute 'trimmed'
self.edit.setText(self.edit.text())
def popUp(self, text='', move=True):
self.edit.setText(text)
self.edit.setSelection(0, len(text))
self.edit.setFocus(Qt.PopupFocusReason)
if move:
self.move(QCursor.pos())
return self.edit.text() if self.exec_() else None
def listItemClick(self, tQListWidgetItem):
try:
text = tQListWidgetItem.text().trimmed()
except AttributeError:
# PyQt5: AttributeError: 'str' object has no attribute 'trimmed'
text = tQListWidgetItem.text().strip()
self.edit.setText(text)
self.validate()
def ch1(self):
if self.listItem is not None and len(self.listItem) > 0:
try:
text = self.listItem[0].trimmed()
except AttributeError:
# PyQt5: AttributeError: 'str' object has no attribute 'trimmed'
text = self.listItem[0].strip()
self.edit.setText(text)
self.validate()
def choose(self):
if self.listItem is not None and len(self.listItem) > 0:
try:
text = self.listWidget.currentItem().text().trimmed()
except AttributeError:
# PyQt5: AttributeError: 'str' object has no attribute 'trimmed'
text = self.listWidget.currentItem().text().strip()
self.edit.setText(text)
self.validate()
| mit | -1,995,101,432,184,391,400 | 35.16 | 80 | 0.592644 | false |
OceanOptics/FloatProcess | test_process.py | 1 | 1894 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: nils
# @Date: 2016-03-10 16:02:19
# @Last Modified by: nils
# @Last Modified time: 2017-01-03 22:33:53
# MAIN_RT is a script that is intended to run on individual profiles
# the following processing is done:
# 1. load float configuration
# 2. import profile
# 3. update dashboard
# 4. process profile
# .1 convert counts to scientific units
# .2 apply corrections (NPQ, O2_temp, O2_sal)
# .3 estimate other products
# 5. export new data in Seabass format
from toolbox import *
from process import *
from dashboard import *
import os # list files in dir
# 0. Parameters
dir_data = '/Users/nils/Documents/UMaine/Lab/data/NAAMES/floats/RAW_EOT/'
dir_config = '/Users/nils/Documents/UMaine/Lab/data/NAAMES/floats/param/'
dir_www = '/Users/nils/Documents/MATLAB/Float_DB/output/'
float_status = 'NAAMES_float_status.json'
filename_profile = dir_data + 'n0572/0572.023.msg'
# 1. Load float configuration
foo = filename_profile.split('/')
user_id = foo[-2]
foo = foo[-1].split('.')
float_id = foo[0]
profile_id = foo[1]
if __debug__:
print(user_id, float_id, profile_id)
cfg = import_usr_cfg(dir_config + user_id + '_config.json')
if __debug__:
print("Float configuration : ")
print(cfg)
# 2. Import profile
profile = import_msg(filename_profile)
if __debug__:
print("Profile : ")
print(profile)
# 3. Update dashboard
# Get number of profile
profile_n = len([name for name in os.listdir(
dir_data + user_id + '/') if name.find('.msg') != -1])
if __debug__:
print('Number of profiles: '+str(profile_n))
# update dashboard
update_float_status(dir_www+float_status,user_id,wmo=cfg['wmo'],dt_last=profile['dt'],profile_n=profile_n)
# 4.1 Convert counts to scientific units
# 4.2 Apply corrections
# 4.3 Estimate other products
# 5. Export data
# 6. Update figures
| gpl-3.0 | -3,536,137,749,781,354,500 | 25.305556 | 106 | 0.676346 | false |
GNOME/billreminder | src/gui/widgets/trayicon.py | 2 | 3562 | # -*- coding: utf-8 -*-
__all__ = ['NotifyIcon']
import sys
import os
import gtk
import time
try:
import appindicator
APPINDICATOR = True
except:
APPINDICATOR = False
from lib import common
from lib.utils import ContextMenu
class NotifyIcon:
""" This class creates the tray icon notification - GTK 2.10 or above """
def __init__(self,parent):
""" Constructor """
self.parent = parent
#show the icon
self.start()
def start(self):
""" Function used to show an icon in notification area."""
if APPINDICATOR:
self.indicator = appindicator.Indicator("billreminder", "ApplicationStatus", appindicator.CATEGORY_APPLICATION_STATUS)
self.indicator.set_status(appindicator.STATUS_ACTIVE)
self.indicator.set_attention_icon(common.PANEL_ATTENTION_ICON)
self.indicator.set_icon(common.PANEL_ICON)
self.menu = self.show_menu(None, None, None)
self.indicator.set_menu(self.menu)
self.menu.show()
else:
self.tray = gtk.StatusIcon()
self.tray.set_from_file(common.TRAY_ICON)
self.tray.set_tooltip(_("BillReminder"))
self.tray.connect("popup-menu", self.show_menu, None)
self.tray.connect("activate", self.show_hide, None)
def show_hide(self, status_icon, arg=None):
""" Show and Hide the main window. """
self.parent.show_hide_window()
if isinstance(status_icon, gtk.MenuItem):
menu_item = status_icon
if self.parent.get_window_visibility():
menu_item.set_label(_('Hide Window'))
else:
menu_item.set_label(_('Show Window'))
def show_menu(self, status_icon, button, activate_time, arg=None):
""" Show a popup menu when an user right clicks notification
area icon. """
c = ContextMenu(self)
if self.parent.get_window_visibility():
c.addMenuItem(_('Hide Window'), self.show_hide)
else:
c.addMenuItem(_('Show Window'), self.show_hide)
c.addMenuItem('-', None)
c.addMenuItem(_('Preferences'),
self.parent.on_btnPrefs_activate,
gtk.STOCK_PREFERENCES)
c.addMenuItem(_('About'),
self.parent.on_btnAbout_activate,
gtk.STOCK_ABOUT)
c.addMenuItem('-', None)
c.addMenuItem(_('Quit'),
self.parent.on_btnQuit_activate,
gtk.STOCK_QUIT)
if APPINDICATOR:
return c
print type(activate_time)
c.popup(None,
None,
gtk.status_icon_position_menu,
button,
activate_time, self.tray)
del c
def destroy(self):
""" Hide the systray icon. """
self.tray.set_visible(False)
def exists(self):
""" Do nothing here, only returns that the class was instantiated."""
return True
def get_hints(self):
hints = {}
x = self.tray.get_geometry()[1].x
y = self.tray.get_geometry()[1].y
w = self.tray.get_geometry()[1].width
h = self.tray.get_geometry()[1].height
x += w/2
if y < 100:
# top-panel
y += h/2
else:
# bottom-panel
y -= h/2
hints['x'] = x
hints['y'] = y
hints['desktop-entry'] = 'billreminder'
self.hints = hints
return hints
| gpl-3.0 | 5,821,658,813,228,889,000 | 30.245614 | 130 | 0.549972 | false |
seecr/seecr-test | test/mockservertest.py | 1 | 2939 | ## begin license ##
#
# "Seecr Test" provides test tools.
#
# Copyright (C) 2013, 2019-2021 Seecr (Seek You Too B.V.) https://seecr.nl
#
# This file is part of "Seecr Test"
#
# "Seecr Test" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Seecr Test" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Seecr Test"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
from unittest import TestCase
from seecr.test.mockserver import MockServer
from seecr.test.portnumbergenerator import PortNumberGenerator
from time import time
from urllib.request import urlopen
from urllib.error import HTTPError, URLError
from sys import version_info
PY_VERSION = '%s.%s' % version_info[:2]
class MockServerTest(TestCase):
def setUp(self):
TestCase.setUp(self)
self.ms = MockServer(port=PortNumberGenerator.next())
def testResponse(self):
self.ms.response = 'HTTP/1.0 200 OK\r\n\r\nRe-Sponsed.'
self.ms.start()
self.assertEqual([], self.ms.requests)
self.assertEqual(b'Re-Sponsed.', urlopen(self.ms.myUrl).read())
self.assertEqual(b'Re-Sponsed.', urlopen(self.ms.myUrl).read())
self.assertEqual(2, len(self.ms.requests))
self.assertTrue(b'User-Agent: Python-urllib' in self.ms.requests[0], self.ms.requests[0])
self.assertTrue(b'GET / HTTP/1.1\r\n' in self.ms.requests[0], self.ms.requests[0])
def testResponses(self):
self.ms.responses = ['HTTP/1.0 200 OK\r\n\r\nRe-Sponsed.', 'HTTP/1.0 200 OK\r\n\r\nAnother-Sponsed.']
self.ms.start()
self.assertEqual(b'Re-Sponsed.', urlopen(self.ms.myUrl).read())
self.assertEqual(b'Another-Sponsed.', urlopen(self.ms.myUrl).read())
self.assertRaises(HTTPError, lambda: urlopen(self.ms.myUrl).read())
self.assertEqual(3, len(self.ms.requests))
def testHangupConnectionTimeout(self):
expectedException = IOError
self.ms.socket.close()
self.ms = MockServer(port=PortNumberGenerator.next(), hangupConnectionTimeout=0.1)
self.ms.start()
t0 = time()
self.assertRaises(expectedException, lambda: urlopen(self.ms.myUrl).read())
t1 = time()
delta = t1 - t0
self.assertTrue(0.09 < delta < 0.12, "Expected around 0.1, was %s" % delta)
self.assertEqual(0, len(self.ms.requests))
def tearDown(self):
self.ms.halt = True
TestCase.tearDown(self)
| gpl-2.0 | 1,053,645,833,172,903,300 | 37.168831 | 109 | 0.684927 | false |
acarmel/CouchPotatoServer | couchpotato/core/plugins/browser.py | 32 | 3893 | import ctypes
import os
import string
import traceback
import time
from couchpotato import CPLog
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.encoding import sp, ss, toUnicode
from couchpotato.core.helpers.variable import getUserDir
from couchpotato.core.plugins.base import Plugin
log = CPLog(__name__)
if os.name == 'nt':
import imp
try:
imp.find_module('win32file')
except:
# todo:: subclass ImportError for missing dependencies, vs. broken plugins?
raise ImportError("Missing the win32file module, which is a part of the prerequisite \
pywin32 package. You can get it from http://sourceforge.net/projects/pywin32/files/pywin32/")
else:
# noinspection PyUnresolvedReferences
import win32file
autoload = 'FileBrowser'
class FileBrowser(Plugin):
def __init__(self):
addApiView('directory.list', self.view, docs = {
'desc': 'Return the directory list of a given directory',
'params': {
'path': {'desc': 'The directory to scan'},
'show_hidden': {'desc': 'Also show hidden files'}
},
'return': {'type': 'object', 'example': """{
'is_root': bool, //is top most folder
'parent': string, //parent folder of requested path
'home': string, //user home folder
'empty': bool, //directory is empty
'dirs': array, //directory names
}"""}
})
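        # Illustrative request for this view (host, port, API key and URL layout assumed):
        #   GET /api/<api_key>/directory.list/?path=/home&show_hidden=0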
def getDirectories(self, path = '/', show_hidden = True):
# Return driveletters or root if path is empty
if path == '/' or not path or path == '\\':
if os.name == 'nt':
return self.getDriveLetters()
path = '/'
dirs = []
path = sp(path)
for f in os.listdir(path):
p = sp(os.path.join(path, f))
if os.path.isdir(p) and ((self.is_hidden(p) and bool(int(show_hidden))) or not self.is_hidden(p)):
dirs.append(toUnicode('%s%s' % (p, os.path.sep)))
return sorted(dirs)
def getFiles(self):
pass
def getDriveLetters(self):
driveletters = []
for drive in string.ascii_uppercase:
if win32file.GetDriveType(drive + ':') in [win32file.DRIVE_FIXED, win32file.DRIVE_REMOTE, win32file.DRIVE_RAMDISK, win32file.DRIVE_REMOVABLE]:
driveletters.append(drive + ':\\')
return driveletters
def view(self, path = '/', show_hidden = True, **kwargs):
home = getUserDir()
if not path:
path = home
try:
dirs = self.getDirectories(path = path, show_hidden = show_hidden)
except:
log.error('Failed getting directory "%s" : %s', (path, traceback.format_exc()))
dirs = []
parent = os.path.dirname(path.rstrip(os.path.sep))
if parent == path.rstrip(os.path.sep):
parent = '/'
elif parent != '/' and parent[-2:] != ':\\':
parent += os.path.sep
return {
'is_root': path == '/',
'empty': len(dirs) == 0,
'parent': parent,
'home': home + os.path.sep,
'platform': os.name,
'dirs': dirs,
}
def is_hidden(self, filepath):
name = ss(os.path.basename(os.path.abspath(filepath)))
return name.startswith('.') or self.has_hidden_attribute(filepath)
def has_hidden_attribute(self, filepath):
result = False
try:
attrs = ctypes.windll.kernel32.GetFileAttributesW(sp(filepath)) #@UndefinedVariable
assert attrs != -1
result = bool(attrs & 2)
except (AttributeError, AssertionError):
pass
except:
log.error('Failed getting hidden attribute: %s', traceback.format_exc())
return result
| gpl-3.0 | -7,023,234,115,289,796,000 | 30.144 | 154 | 0.578731 | false |
nicolashainaux/mathmaker | tests/integration/mental_calculation/04_yellow1/test_04_yellow1_challenge.py | 1 | 1621 | # -*- coding: utf-8 -*-
# Mathmaker creates automatically maths exercises sheets
# with their answers
# Copyright 2006-2018 Nicolas Hainaux <[email protected]>
# This file is part of Mathmaker.
# Mathmaker is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
# Mathmaker is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Mathmaker; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from mathmaker.lib import shared
from mathmaker.lib.document.frames import Sheet
def test_challenge():
"""Check this sheet is generated without any error."""
shared.machine.write_out(str(Sheet('mental_calculation',
'04_yellow1',
'challenge')),
pdf_output=True)
def test_challenge_embedding_js():
"""Check this sheet is generated without any error."""
shared.machine.write_out(str(Sheet('mental_calculation',
'04_yellow1',
'challenge',
enable_js_form=True)),
pdf_output=True)
| gpl-3.0 | 7,308,699,029,930,740,000 | 37.595238 | 76 | 0.634793 | false |
daira/tahoe-lafs-debian | src/allmydata/manhole.py | 5 | 10635 |
# this is adapted from my code in Buildbot -warner
import binascii, base64
from twisted.python import log
from twisted.application import service, strports
from twisted.cred import checkers, portal
from twisted.conch import manhole, telnet, manhole_ssh, checkers as conchc
from twisted.conch.insults import insults
from twisted.internet import protocol
from zope.interface import implements
from allmydata.util.fileutil import precondition_abspath
# makeTelnetProtocol and _TelnetRealm are for the TelnetManhole
class makeTelnetProtocol:
# this curries the 'portal' argument into a later call to
# TelnetTransport()
def __init__(self, portal):
self.portal = portal
def __call__(self):
auth = telnet.AuthenticatingTelnetProtocol
return telnet.TelnetTransport(auth, self.portal)
class _TelnetRealm:
implements(portal.IRealm)
def __init__(self, namespace_maker):
self.namespace_maker = namespace_maker
def requestAvatar(self, avatarId, *interfaces):
if telnet.ITelnetProtocol in interfaces:
namespace = self.namespace_maker()
p = telnet.TelnetBootstrapProtocol(insults.ServerProtocol,
manhole.ColoredManhole,
namespace)
return (telnet.ITelnetProtocol, p, lambda: None)
raise NotImplementedError()
class chainedProtocolFactory:
# this curries the 'namespace' argument into a later call to
# chainedProtocolFactory()
def __init__(self, namespace):
self.namespace = namespace
def __call__(self):
return insults.ServerProtocol(manhole.ColoredManhole, self.namespace)
class AuthorizedKeysChecker(conchc.SSHPublicKeyDatabase):
"""Accept connections using SSH keys from a given file.
SSHPublicKeyDatabase takes the username that the prospective client has
requested and attempts to get a ~/.ssh/authorized_keys file for that
username. This requires root access, so it isn't as useful as you'd
like.
Instead, this subclass looks for keys in a single file, given as an
argument. This file is typically kept in the buildmaster's basedir. The
file should have 'ssh-dss ....' lines in it, just like authorized_keys.
"""
def __init__(self, authorized_keys_file):
precondition_abspath(authorized_keys_file)
self.authorized_keys_file = authorized_keys_file
def checkKey(self, credentials):
f = open(self.authorized_keys_file)
for l in f.readlines():
l2 = l.split()
if len(l2) < 2:
continue
try:
if base64.decodestring(l2[1]) == credentials.blob:
return 1
except binascii.Error:
continue
return 0
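# Illustrative sketch of the expected key file (path and key material assumed):
#   /abs/path/to/authorized_keys holds one public key per line, e.g.
#     ssh-dss AAAAB3NzaC1kc3MAAACBA...== comment
#   checker = AuthorizedKeysChecker("/abs/path/to/authorized_keys")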
class ModifiedColoredManhole(manhole.ColoredManhole):
def connectionMade(self):
manhole.ColoredManhole.connectionMade(self)
# look in twisted.conch.recvline.RecvLine for hints
self.keyHandlers["\x08"] = self.handle_BACKSPACE
self.keyHandlers["\x15"] = self.handle_KILLLINE
self.keyHandlers["\x01"] = self.handle_HOME
self.keyHandlers["\x04"] = self.handle_DELETE
self.keyHandlers["\x05"] = self.handle_END
self.keyHandlers["\x0b"] = self.handle_KILLLINE # really kill-to-end
#self.keyHandlers["\xe2"] = self.handle_BACKWARDS_WORD # M-b
#self.keyHandlers["\xe6"] = self.handle_FORWARDS_WORD # M-f
def handle_KILLLINE(self):
self.handle_END()
for i in range(len(self.lineBuffer)):
self.handle_BACKSPACE()
class _BaseManhole(service.MultiService):
"""This provides remote access to a python interpreter (a read/exec/print
loop) embedded in the buildmaster via an internal SSH server. This allows
detailed inspection of the buildmaster state. It is of most use to
buildbot developers. Connect to this by running an ssh client.
"""
def __init__(self, port, checker, using_ssh=True):
"""
@type port: string or int
@param port: what port should the Manhole listen on? This is a
strports specification string, like 'tcp:12345' or
'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a
simple tcp port.
@type checker: an object providing the
L{twisted.cred.checkers.ICredentialsChecker} interface
@param checker: if provided, this checker is used to authenticate the
client instead of using the username/password scheme. You must either
provide a username/password or a Checker. Some useful values are::
import twisted.cred.checkers as credc
import twisted.conch.checkers as conchc
c = credc.AllowAnonymousAccess # completely open
c = credc.FilePasswordDB(passwd_filename) # file of name:passwd
c = conchc.UNIXPasswordDatabase # getpwnam() (probably /etc/passwd)
@type using_ssh: bool
@param using_ssh: If True, accept SSH connections. If False, accept
regular unencrypted telnet connections.
"""
# unfortunately, these don't work unless we're running as root
#c = credc.PluggableAuthenticationModulesChecker: PAM
#c = conchc.SSHPublicKeyDatabase() # ~/.ssh/authorized_keys
# and I can't get UNIXPasswordDatabase to work
service.MultiService.__init__(self)
if type(port) is int:
port = "tcp:%d" % port
self.port = port # for comparison later
self.checker = checker # to maybe compare later
def makeNamespace():
# close over 'self' so we can get access to .parent later
from allmydata import debugshell
debugshell.app = self.parent # make node accessible via 'app'
namespace = {}
for sym in dir(debugshell):
if sym.startswith('__') and sym.endswith('__'):
continue
namespace[sym] = getattr(debugshell, sym)
return namespace
def makeProtocol():
namespace = makeNamespace()
p = insults.ServerProtocol(ModifiedColoredManhole, namespace)
return p
self.using_ssh = using_ssh
if using_ssh:
r = manhole_ssh.TerminalRealm()
r.chainedProtocolFactory = makeProtocol
p = portal.Portal(r, [self.checker])
f = manhole_ssh.ConchFactory(p)
else:
r = _TelnetRealm(makeNamespace)
p = portal.Portal(r, [self.checker])
f = protocol.ServerFactory()
f.protocol = makeTelnetProtocol(p)
s = strports.service(self.port, f)
s.setServiceParent(self)
def startService(self):
service.MultiService.startService(self)
if self.using_ssh:
via = "via SSH"
else:
via = "via telnet"
log.msg("Manhole listening %s on port %s" % (via, self.port))
class TelnetManhole(_BaseManhole):
"""This Manhole accepts unencrypted (telnet) connections, and requires a
username and password authorize access. You are encouraged to use the
encrypted ssh-based manhole classes instead."""
def __init__(self, port, username, password):
"""
@type port: string or int
@param port: what port should the Manhole listen on? This is a
strports specification string, like 'tcp:12345' or
'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a
simple tcp port.
@param username:
@param password: username= and password= form a pair of strings to
use when authenticating the remote user.
"""
self.username = username
self.password = password
c = checkers.InMemoryUsernamePasswordDatabaseDontUse()
c.addUser(username, password)
_BaseManhole.__init__(self, port, c, using_ssh=False)
class PasswordManhole(_BaseManhole):
"""This Manhole accepts encrypted (ssh) connections, and requires a
username and password to authorize access.
"""
def __init__(self, port, username, password):
"""
@type port: string or int
@param port: what port should the Manhole listen on? This is a
strports specification string, like 'tcp:12345' or
'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a
simple tcp port.
@param username:
@param password: username= and password= form a pair of strings to
use when authenticating the remote user.
"""
self.username = username
self.password = password
c = checkers.InMemoryUsernamePasswordDatabaseDontUse()
c.addUser(username, password)
_BaseManhole.__init__(self, port, c)
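# Illustrative usage sketch (port, credentials and parent service are assumed):
#   m = PasswordManhole("tcp:8022:interface=127.0.0.1", "admin", "secret")
#   m.setServiceParent(node)   # attach to any twisted MultiService parent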
class AuthorizedKeysManhole(_BaseManhole):
"""This Manhole accepts ssh connections, and requires that the
prospective client have an ssh private key that matches one of the public
keys in our authorized_keys file. It is created with the name of a file
that contains the public keys that we will accept."""
def __init__(self, port, keyfile):
"""
@type port: string or int
@param port: what port should the Manhole listen on? This is a
strports specification string, like 'tcp:12345' or
'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a
simple tcp port.
@param keyfile: the path of a file that contains SSH public keys of
authorized users, one per line. This is the exact
same format as used by sshd in ~/.ssh/authorized_keys .
The path should be absolute.
"""
self.keyfile = keyfile
c = AuthorizedKeysChecker(keyfile)
_BaseManhole.__init__(self, port, c)
class ArbitraryCheckerManhole(_BaseManhole):
"""This Manhole accepts ssh connections, but uses an arbitrary
user-supplied 'checker' object to perform authentication."""
def __init__(self, port, checker):
"""
@type port: string or int
@param port: what port should the Manhole listen on? This is a
strports specification string, like 'tcp:12345' or
'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a
simple tcp port.
@param checker: an instance of a twisted.cred 'checker' which will
perform authentication
"""
_BaseManhole.__init__(self, port, checker)
| gpl-2.0 | 9,179,711,797,089,938,000 | 37.11828 | 79 | 0.63874 | false |
squall1988/lquant | test/test_v_state.py | 1 | 2168 | # coding=utf-8
__author__ = 'squall'
import pandas as pd
import numpy as np
import matplotlib.pylab as plt
import os
total = 0
total_event = 0
final_result = []
for roots, _, files in os.walk('d:/stock/new-data'):
for f in files:
file_path = os.path.join(roots, f)
try:
data = pd.read_csv(file_path, sep='\t')
if data.values.shape[0] > 100:
total_event += data.values.shape[0]
min_data = data.get(["OPEN", "CLOSE"]).values.min(axis=1)
real_pillar = data.OPEN.values - data.CLOSE.values
down_side = min_data - data.LOW.values
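                # real_pillar is the candle body (open minus close); down_side is the lower
                # shadow, i.e. the distance from the bottom of the body down to the day's low.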
result = []
for x, y, z in zip(down_side[1:], real_pillar[1:], data.CLOSE.values[:-1]):
if y != 0.0:
result.append(x/z)
# print result
down_up_rate = down_side/real_pillar
result = []
days = []
for c, d, t, h, i in zip(data.CLOSE.values[:-1], down_side[1:], data.CLOSE.values[1:],
data.HIGH.values[1:],
data.DATE.values[1:]):
result.append([d/c*100, t, h])
days.append(i)
result = np.array(result)
rate_result = []
for i in range(len(result)-5):
if result[i, 0] > 5:
rate_result.append((np.max(result[i+1:i+5, 2]) - result[i, 1])/result[i, 1]*100)
# print days[i]
rate_result = np.array(rate_result)
a = np.sum(rate_result>2)
# print np.where(rate_result>2)
b = rate_result.shape[0]
# print a, b
# print a*1.0/b
total += b
# print 'this is the mean', np.mean(rate_result)
final_result.append(np.mean(rate_result))
print f
except Exception as e:
print e.message
# plt.plot(rate_result)
print total
print total_event
plt.hist(final_result, bins=100)
plt.show() | bsd-2-clause | 3,792,033,364,546,819,000 | 32.890625 | 104 | 0.4631 | false |
aniruddha-adhikary/fudging-automators | scrapers/result_checker-hsc.py | 1 | 2049 | #!/usr/bin/env python
# This script fetches (probably doesn't work anymore) results of
# HSC Exam, Bangladesh.
import urllib2
from BeautifulSoup import BeautifulSoup
def get_html(exam, year, board, roll):
"""Get the result page HTML output directly.
get_html(exam, year, board, roll)"""
base_url = "http://www.educationboardresults.gov.bd/regular"
base_index = 'index.php'
base_result = 'result.php'
index_response = urllib2.urlopen(base_url + '/' + base_index)
# Getting PHPSESSID session cookie
session_cookie = index_response.info().headers[3].split()[1][0:36]
post_data = "sr=3&et=0&exam=%s&year=%s&board=%s&roll=%s&button2=Submit" \
% (exam, year, board, roll)
result_request = urllib2.Request(base_url + "/" + base_result, headers = {
'Cookie': session_cookie
})
result_response = urllib2.urlopen(result_request, post_data)
return result_response.read()
def get_result(exam, year, board, roll):
"""Get the result as a dictionary.
get_result(exam, year, board, roll)"""
html_response = get_html(exam, year, board, roll)
souped_html = BeautifulSoup(html_response)
souped_html_td = souped_html.findAll('td')
td_list = []
# Forming a new list with only text values
for each_element in souped_html_td:
td_list.append(each_element.text)
# Gathering the grades
result_list = td_list[td_list.index('Grade')+1:td_list.index('Search Again')]
student_data = {
'info': {
'name': td_list[27].lower(),
'group': td_list[33].lower(),
'session': td_list[37],
'registration': td_list[39],
'student_type': td_list[41].lower(),
'gpa': td_list[49]
},
}
full_dict = dict(student_data.items() + dictify(result_list).items())
return full_dict
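# Illustrative call (the exam/board codes here are assumed, not taken from the site):
#   data = get_result(exam='hsc', year='2013', board='dhaka', roll='123456')
#   data['info']['gpa'], data['grades']   # GPA string and per-subject grade mapping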
def dictify(result_list):
"""Make a dictionary from the souped result list
dictify(result_list)"""
result_dict = {'grades': {}}
element_counter = 1
while(element_counter < len(result_list)):
result_dict['grades'][result_list[element_counter]] = result_list[element_counter+1]
element_counter = element_counter + 3
return result_dict | mit | -914,201,017,888,720,000 | 25.973684 | 86 | 0.687164 | false |
cloudera/hue | desktop/core/ext-py/navoptapi-0.1.0/altuscli/__init__.py | 4 | 3220 | # Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Modifications made by Cloudera are:
# Copyright (c) 2016 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import re
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
VERSION = __version__
ALTUSCLI_ROOT = os.path.dirname(os.path.abspath(__file__))
# Used to specify anonymous (unsigned) request signature
UNSIGNED = object()
SCALAR_TYPES = set(['string',
'float',
'integer',
'long',
'boolean',
'double',
'blob',
'datetime'])
LIST_TYPE = 'array'
OBJECT_TYPE = 'object'
REF_KEY = '$ref'
REF_NAME_PREFIX = '#/definitions/'
COMPLEX_TYPES = set([OBJECT_TYPE,
LIST_TYPE])
DEFAULT_PROFILE_NAME = 'default'
ALTUS_ACCESS_KEY_ID_KEY_NAME = 'altus_access_key_id'
ALTUS_PRIVATE_KEY_KEY_NAME = 'altus_private_key'
# Python argparse has a bug in which '-' are not parsed correctly if they appear
# as values for other arguments, see: http://bugs.python.org/issue9334 for more
# details. This defines special encoding for dash that we will "decode" and
# replace with a dash. The reason we are using \\ is that there is a non zero
# chance that customers can discover this themselves.
ARGPARSE_DASH_ENCODING = '\\-'
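# For example, a value written on the command line as "\-verbose" is expected to be
# decoded back to "-verbose" later on (the decoding itself happens elsewhere in the CLI).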
# Prepopulate the cache with special cases that don't match our regular
# transformation.
_xform_cache = {
('s3GuardConfiguration', '-'): 's3-guard-configuration',
}
_first_cap_regex = re.compile('(.)([A-Z][a-z]+)')
_number_cap_regex = re.compile('([a-z])([0-9]+)')
_end_cap_regex = re.compile('([a-z0-9])([A-Z])')
# The regex below handles the special case where some acryonym
# name is pluralized, e.g GatewayARNs, ListWebACLs, SomeCNAMEs.
_special_case_transform = re.compile('[A-Z]{3,}s$')
def xform_name(name, sep='_', _xform_cache=_xform_cache):
if sep in name:
# If the sep is in the name, assume that it's already
# transformed and return the string unchanged.
return name
key = (name, sep)
if key not in _xform_cache:
if _special_case_transform.search(name) is not None:
is_special = _special_case_transform.search(name)
matched = is_special.group()
# Replace something like CRNs, ACLs with _crns, _acls.
name = name[:-len(matched)] + sep + matched.lower()
s1 = _first_cap_regex.sub(r'\1' + sep + r'\2', name)
s2 = _number_cap_regex.sub(r'\1' + sep + r'\2', s1)
transformed = _end_cap_regex.sub(r'\1' + sep + r'\2', s2).lower()
_xform_cache[key] = transformed
return _xform_cache[key]
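# Expected behaviour, illustrated (derived from the rules above):
#   xform_name('CreateCluster')              -> 'create_cluster'
#   xform_name('ListWebACLs')                -> 'list_web_acls'          (pluralised acronym)
#   xform_name('s3GuardConfiguration', '-')  -> 's3-guard-configuration' (prepopulated cache)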
| apache-2.0 | -1,953,822,374,105,349,600 | 34.384615 | 80 | 0.643789 | false |
caoxudong/code_practice | leetcode/617_merge_two_binary_trees.py | 1 | 4569 | """
You are given two binary trees. Imagine that when you put one of them to cover the other, some nodes of the two trees overlap while the others do not.
You need to merge them into a new binary tree. The merge rule is that if two nodes overlap, their values are summed to form the value of the merged node; otherwise, the non-null node is used as the node of the new tree.
Example 1:
Input:
Tree 1 Tree 2
1 2
/ \ / \
3 2 1 3
/ \ \
5 4 7
Output:
Merged tree:
3
/ \
4 5
/ \ \
5 4 7
Note: The merging process must start from the root nodes of both trees.
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def mergeTrees(self, t1: TreeNode, t2: TreeNode) -> TreeNode:
if t1 is None and t2 is None:
return None
else:
result_root = TreeNode(0)
stack = []
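            # each stack entry is (node_from_t1, node_from_t2, parent_in_result, is_left_child)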
if t1 is None:
result_root.val = t2.val
stack.append((None, t2.right, result_root, False))
stack.append((None, t2.left, result_root, True))
else:
if t2 is None:
result_root.val = t1.val
stack.append((t1.right, None, result_root, False))
stack.append((t1.left, None, result_root, True))
else:
result_root.val = t1.val + t2.val
stack.append((t1.right, t2.right, result_root, False))
stack.append((t1.left, t2.left, result_root, True))
while len(stack) > 0:
node_tuple = stack.pop()
t1_node = node_tuple[0]
t2_node = node_tuple[1]
result_parent = node_tuple[2]
is_left_child = node_tuple[3]
if t1_node is None and t2_node is None:
continue
else:
if t1_node is None:
result_node = TreeNode(t2_node.val)
stack.append((None, t2_node.right, result_node, False))
stack.append((None, t2_node.left, result_node, True))
else:
if t2_node is None:
result_node = TreeNode(t1_node.val)
stack.append(
(t1_node.right, None, result_node, False))
stack.append(
(t1_node.left, None, result_node, True))
else:
result_node = TreeNode(t1_node.val + t2_node.val)
stack.append(
(t1_node.right, t2_node.right, result_node, False))
stack.append(
(t1_node.left, t2_node.left, result_node, True))
if is_left_child:
result_parent.left = result_node
else:
result_parent.right = result_node
return result_root
def create_tree(values):
values_len = len(values)
if values_len == 0:
return None
root = TreeNode(values[0])
nodes = []
nodes.append(root)
i = 1
while i < values_len:
node_parent = nodes.pop(0)
left_value = values[i]
if left_value is None:
node_parent.left = None
else:
left_node = TreeNode(left_value)
node_parent.left = left_node
nodes.append(left_node)
i += 1
if i < values_len:
            right_value = values[i]
            if right_value is None:
                node_parent.right = None
            else:
                right_node = TreeNode(right_value)
                node_parent.right = right_node
                nodes.append(right_node)
            i += 1
return root
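# Builds a tree from a level-order list where None marks a missing child, e.g.
#   create_tree([1, 3, 2, 5]) gives     1
#                                      / \
#                                     3   2
#                                    /
#                                   5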
if __name__ == "__main__":
tests = [
# ([[1, 3, 2, 5], [2, 1, 3, None, 4, None, 7]]),
([[], [1]])
]
s = Solution()
for t in tests:
s.mergeTrees(create_tree(t[0]), create_tree(t[1]))
| mit | 165,613,983,425,408,200 | 33.353383 | 219 | 0.446487 | false |
home-assistant/home-assistant | homeassistant/helpers/trigger.py | 5 | 3391 | """Triggers."""
from __future__ import annotations
import asyncio
import logging
from types import MappingProxyType
from typing import Any, Callable
import voluptuous as vol
from homeassistant.const import CONF_PLATFORM
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import IntegrationNotFound, async_get_integration
_PLATFORM_ALIASES = {
"device_automation": ("device",),
"homeassistant": ("event", "numeric_state", "state", "time_pattern", "time"),
}
async def _async_get_trigger_platform(hass: HomeAssistant, config: ConfigType) -> Any:
platform = config[CONF_PLATFORM]
for alias, triggers in _PLATFORM_ALIASES.items():
if platform in triggers:
platform = alias
break
try:
integration = await async_get_integration(hass, platform)
except IntegrationNotFound:
raise vol.Invalid(f"Invalid platform '{platform}' specified") from None
try:
return integration.get_platform("trigger")
except ImportError:
raise vol.Invalid(
f"Integration '{platform}' does not provide trigger support"
) from None
async def async_validate_trigger_config(
hass: HomeAssistant, trigger_config: list[ConfigType]
) -> list[ConfigType]:
"""Validate triggers."""
config = []
for conf in trigger_config:
platform = await _async_get_trigger_platform(hass, conf)
if hasattr(platform, "async_validate_trigger_config"):
conf = await platform.async_validate_trigger_config(hass, conf)
else:
conf = platform.TRIGGER_SCHEMA(conf)
config.append(conf)
return config
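# Illustrative input (values assumed): a list of trigger configs such as
#   [{"platform": "state", "entity_id": "binary_sensor.door", "to": "on"}]
# is validated platform by platform and returned in normalised form.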
async def async_initialize_triggers(
hass: HomeAssistant,
trigger_config: list[ConfigType],
action: Callable,
domain: str,
name: str,
log_cb: Callable,
home_assistant_start: bool = False,
variables: dict[str, Any] | MappingProxyType | None = None,
) -> CALLBACK_TYPE | None:
"""Initialize triggers."""
info = {
"domain": domain,
"name": name,
"home_assistant_start": home_assistant_start,
"variables": variables,
}
triggers = []
for idx, conf in enumerate(trigger_config):
platform = await _async_get_trigger_platform(hass, conf)
info = {**info, "trigger_id": f"{idx}"}
triggers.append(platform.async_attach_trigger(hass, conf, action, info))
attach_results = await asyncio.gather(*triggers, return_exceptions=True)
removes = []
for result in attach_results:
if isinstance(result, HomeAssistantError):
log_cb(logging.ERROR, f"Got error '{result}' when setting up triggers for")
elif isinstance(result, Exception):
log_cb(logging.ERROR, "Error setting up trigger", exc_info=result)
elif result is None:
log_cb(
logging.ERROR, "Unknown error while setting up trigger (empty result)"
)
else:
removes.append(result)
if not removes:
return None
log_cb(logging.INFO, "Initialized trigger")
@callback
def remove_triggers(): # type: ignore
"""Remove triggers."""
for remove in removes:
remove()
return remove_triggers
| apache-2.0 | 4,498,958,735,861,948,000 | 30.990566 | 87 | 0.657033 | false |
g2p/SimpleTAL | tests/TALTests/XMLTests/TALSingletonTests.py | 1 | 10765 | #!/usr/bin/python
""" Copyright (c) 2009 Colin Stewart (http://www.owlfish.com/)
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
If you make any bug fixes or feature enhancements please let me know!
Unit test cases.
"""
import unittest, os
import io
import logging, logging.config
import xml.sax, xml.sax.handler
from hashlib import md5
from simpletal import simpleTAL, simpleTALES
if (os.path.exists ("logging.ini")):
logging.config.fileConfig ("logging.ini")
else:
logging.basicConfig()
class stringHasher:
def __init__ (self, encoding = 'utf-8'):
self.encoding = encoding
self.hasher = md5()
def update (self, avalue):
if (isinstance (avalue, str)):
self.hasher.update(avalue.encode ('utf-8'))
else:
self.hasher.update (avalue)
def digest (self):
return self.hasher.digest()
def hexdigest (self):
return self.hasher.hexdigest()
class XMLChecksumHandler (xml.sax.handler.ContentHandler, xml.sax.handler.DTDHandler, xml.sax.handler.ErrorHandler):
def __init__ (self, parser):
xml.sax.handler.ContentHandler.__init__ (self)
self.ourParser = parser
def startDocument (self):
self.digest = stringHasher()
def startPrefixMapping (self, prefix, uri):
self.digest.update (prefix)
self.digest.update (uri)
def endPrefixMapping (self, prefix):
self.digest.update (prefix)
def startElement (self, name, atts):
self.digest.update (name)
allAtts = atts.getNames()
allAtts.sort()
for att in allAtts:
self.digest.update (att)
self.digest.update (atts [att])
def endElement (self, name):
self.digest.update (name)
def characters (self, data):
self.digest.update (data)
def processingInstruction (self, target, data):
self.digest.update (target)
self.digest.update (data)
def skippedEntity (self, name):
self.digest.update (name)
# DTD Handler
def notationDecl(self, name, publicId, systemId):
self.digest.update (name)
self.digest.update (publicId)
self.digest.update (systemId)
	def unparsedEntityDecl(self, name, publicId, systemId, ndata):
self.digest.update (name)
self.digest.update (publicId)
self.digest.update (systemId)
self.digest.update (ndata)
def error (self, excpt):
print("Error: %s" % str (excpt))
def warning (self, excpt):
print("Warning: %s" % str (excpt))
def getDigest (self):
return self.digest.hexdigest()
CHECKSUMPARSER = xml.sax.make_parser()
CHECKSUMHANDLER = XMLChecksumHandler(CHECKSUMPARSER)
CHECKSUMPARSER.setContentHandler (CHECKSUMHANDLER)
CHECKSUMPARSER.setDTDHandler (CHECKSUMHANDLER)
CHECKSUMPARSER.setErrorHandler (CHECKSUMHANDLER)
def getXMLChecksum (doc):
CHECKSUMPARSER.parse (io.StringIO (doc))
return CHECKSUMHANDLER.getDigest()
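# The digest is built from SAX events (element names, attributes sorted by name, character
# data), so attribute order and empty-element syntax do not affect the comparison.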
class TALSingletonTests (unittest.TestCase):
def setUp (self):
self.context = simpleTALES.Context()
self.context.addGlobal ('test', 'testing')
self.context.addGlobal ('one', [1])
self.context.addGlobal ('two', ["one", "two"])
self.context.addGlobal ('three', ['1',"Two",'3'])
def _runBasicTest_ (self, text, result, errMsg = "Error"):
""" Runs a basic test - i.e. does full string compare rather than
checksum.
"""
template = simpleTAL.compileXMLTemplate (text)
file = io.StringIO ()
template.expand (self.context, file, outputEncoding="iso-8859-1")
realResult = file.getvalue().encode ('iso-8859-1')
self.failUnless (result == realResult, "%s - \npassed in: %s \ngot back %s \nexpected %s\n\nTemplate: %s" % (errMsg, text, realResult, result, template))
def _runTest_ (self, txt, result, errMsg="Error"):
template = simpleTAL.compileXMLTemplate (txt)
file = io.StringIO ()
template.expand (self.context, file, outputEncoding="iso-8859-1")
realResult = file.getvalue()
try:
expectedChecksum = getXMLChecksum (result)
except Exception as e:
self.fail ("Exception (%s) thrown parsing XML expected result: %s" % (str (e), result))
try:
realChecksum = getXMLChecksum (realResult)
except Exception as e:
self.fail ("Exception (%s) thrown parsing XML actual result: %s\nPage Template: %s" % (str (e), realResult, str (template)))
self.failUnless (expectedChecksum == realChecksum, "%s - \npassed in: %s \ngot back %s \nexpected %s\n\nTemplate: %s" % (errMsg, txt, realResult, result, template))
def _runMacroTest_ (self, macros, page, result, errMsg="Error"):
macroTemplate = simpleTAL.compileXMLTemplate (macros)
pageTemplate = simpleTAL.compileXMLTemplate (page)
self.context.addGlobal ("site", macroTemplate)
self.context.addGlobal ("here", pageTemplate)
file = io.StringIO ()
pageTemplate.expand (self.context, file)
realResult = file.getvalue()
try:
expectedChecksum = getXMLChecksum (result)
except Exception as e:
self.fail ("Exception (%s) thrown parsing XML expected result: %s" % (str (e), result))
try:
realChecksum = getXMLChecksum (realResult)
except Exception as e:
self.fail ("Exception (%s) thrown parsing XML actual result: %s\nPage Template: %s\nMacro Template: %s" % (str (e), realResult, str (pageTemplate), str (macroTemplate)))
self.failUnless (expectedChecksum == realChecksum, "%s - \npassed in macro: %s \n and page: %s\ngot back %s \nexpected %s\n\nPage Template: %s" % (errMsg, macros,page, realResult, result, pageTemplate))
def testBasicPassThrough (self):
self._runBasicTest_ ("""<?xml version="1.0" encoding="iso-8859-1"?><html><br /><p>Hello</p><hr /></html>""".encode ("iso-8859-1")
,"""<?xml version="1.0" encoding="iso-8859-1"?>\n<html><br /><p>Hello</p><hr /></html>""".encode ('iso-8859-1')
,"""Local define followed by attributes and global tes""")
def testDefineAttributes (self):
self._runTest_ ("""<html><br tal:define="temp test" tal:attributes="href temp"/><br tal:attributes="href temp"/></html>"""
,"""<html><br href="testing" /><br/></html>"""
,"""Local define followed by attributes and global test failed.""")
def testConditionDefine (self):
self._runTest_ ("""<html><br tal:define="global temp test" tal:attributes="href temp"/><br tal:condition="exists: temp"/><img tal:condition="not:exists:temp"/></html>"""
,"""<html><br href="testing" /><br/></html>"""
,"""Global define and condition failed""")
def testRepeatAttributes (self):
self._runTest_ ("""<html><br tal:repeat="temp three" tal:attributes="href temp"/></html>"""
,"""<html><br href="1" /><br href="Two"/><br href="3"/></html>"""
,"""Repeat and attributes failed.""")
def testContentRepeat (self):
self._runTest_ ("""<html><br tal:repeat="temp three" tal:content="temp"/></html>"""
,"""<html><br>1</br><br>Two</br><br>3</br></html>"""
,"""Content with Repeat failed.""")
def testReplaceRepeat (self):
self._runTest_ ("""<html><br tal:repeat="temp three" tal:replace="temp"/></html>"""
,"""<html>1Two3</html>"""
,"""Replace with Repeat failed.""")
def testReplaceRepeatAttributes (self):
self._runTest_ ("""<html><br tal:repeat="temp three" tal:attributes="href temp" tal:replace="temp"/></html>"""
,"""<html>1Two3</html>"""
,"""Replace with Repeat failed.""")
def testContentRepeatAttributes (self):
self._runTest_ ("""<html><br tal:repeat="temp three" tal:attributes="href temp" tal:content="temp"/></html>"""
,"""<html><br href="1">1</br><br href="Two">Two</br><br href="3">3</br></html>"""
,"""Content with Repeat and Attributes failed.""")
def testOmitTagContentRepeatAttributes (self):
self._runTest_ ("""<html><br tal:repeat="temp three" tal:omit-tag="default" tal:attributes="href temp" tal:content="temp"/></html>"""
,"""<html>1Two3</html>"""
,"""OmitTag with Content and Repeat and Attributes failed.""")
def testDefineMacroSlots (self):
self._runMacroTest_ ("""<html metal:define-macro="m1"><br metal:define-slot="sl1"/></html>"""
,"""<div metal:use-macro="site/macros/m1"><p metal:fill-slot="sl1">Hello</p></div>"""
,"""<html><p>Hello</p></html>"""
,"""METAL with define-slot on singleton failed.""")
def testDefineMacro (self):
self._runMacroTest_ ("""<html metal:define-macro="m1" id="test"/>"""
,"""<div metal:use-macro="site/macros/m1"><p metal:fill-slot="sl1">Hello</p></div>"""
,"""<html id="test"/>"""
,"""METAL with define-macro on singleton failed.""")
def testUseMacro (self):
self._runMacroTest_ ("""<html metal:define-macro="m1"><br metal:define-slot="sl1"/></html>"""
,"""<div metal:use-macro="site/macros/m1"/>"""
,"""<html><br/></html>"""
,"""METAL with use-macro on singleton failed.""")
def testFillSlot (self):
self._runMacroTest_ ("""<html metal:define-macro="m1"><br metal:define-slot="sl1"/></html>"""
,"""<div metal:use-macro="site/macros/m1"><i metal:fill-slot="sl1" id="test"/></div>"""
,"""<html><i id="test"/></html>"""
,"""METAL with fill-slot on singleton failed.""")
def testRepeatUseMacro (self):
self._runMacroTest_ ("""<html metal:define-macro="m1"><br metal:define-slot="sl1"/></html>"""
,"""<test><p tal:repeat="nums three"><div metal:use-macro="site/macros/m1"/></p></test>"""
,"""<test><p><html><br/></html></p><p><html><br/></html></p><p><html><br/></html></p></test>"""
,"""METAL with repeat and use-macro on singleton failed.""")
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | 8,382,727,267,810,647,000 | 39.469925 | 204 | 0.672643 | false |
dellysunnymtech/bitbake | lib/toaster/orm/migrations/0008_auto__chg_field_variablehistory_operation__chg_field_recipe_descriptio.py | 6 | 16612 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'VariableHistory.operation'
db.alter_column(u'orm_variablehistory', 'operation', self.gf('django.db.models.fields.CharField')(max_length=64))
# Changing field 'Recipe.description'
db.alter_column(u'orm_recipe', 'description', self.gf('django.db.models.fields.TextField')())
# Changing field 'Target_Image_File.file_name'
db.alter_column(u'orm_target_image_file', 'file_name', self.gf('django.db.models.fields.FilePathField')(max_length=254))
# Changing field 'Package.description'
db.alter_column(u'orm_package', 'description', self.gf('django.db.models.fields.TextField')())
def backwards(self, orm):
# Changing field 'VariableHistory.operation'
db.alter_column(u'orm_variablehistory', 'operation', self.gf('django.db.models.fields.CharField')(max_length=16))
# Changing field 'Recipe.description'
db.alter_column(u'orm_recipe', 'description', self.gf('django.db.models.fields.CharField')(max_length=100))
# Changing field 'Target_Image_File.file_name'
db.alter_column(u'orm_target_image_file', 'file_name', self.gf('django.db.models.fields.FilePathField')(max_length=100))
# Changing field 'Package.description'
db.alter_column(u'orm_package', 'description', self.gf('django.db.models.fields.CharField')(max_length=200))
models = {
u'orm.build': {
'Meta': {'object_name': 'Build'},
'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'started_on': ('django.db.models.fields.DateTimeField', [], {}),
'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'orm.helptext': {
'Meta': {'object_name': 'HelpText'},
'area': ('django.db.models.fields.IntegerField', [], {}),
'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'text': ('django.db.models.fields.TextField', [], {})
},
u'orm.layer': {
'Meta': {'object_name': 'Layer'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'local_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'orm.layer_version': {
'Meta': {'object_name': 'Layer_Version'},
'branch': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_build'", 'to': u"orm['orm.Build']"}),
'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
'priority': ('django.db.models.fields.IntegerField', [], {})
},
u'orm.logmessage': {
'Meta': {'object_name': 'LogMessage'},
'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
},
u'orm.package': {
'Meta': {'object_name': 'Package'},
'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'orm.package_dependency': {
'Meta': {'object_name': 'Package_Dependency'},
'dep_type': ('django.db.models.fields.IntegerField', [], {}),
'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
},
u'orm.package_file': {
'Meta': {'object_name': 'Package_File'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
'size': ('django.db.models.fields.IntegerField', [], {})
},
u'orm.recipe': {
'Meta': {'object_name': 'Recipe'},
'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'orm.recipe_dependency': {
'Meta': {'object_name': 'Recipe_Dependency'},
'dep_type': ('django.db.models.fields.IntegerField', [], {}),
'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
},
u'orm.target': {
'Meta': {'object_name': 'Target'},
'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
'target': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'orm.target_file': {
'Meta': {'object_name': 'Target_File'},
'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inodetype': ('django.db.models.fields.IntegerField', [], {}),
'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'size': ('django.db.models.fields.IntegerField', [], {}),
'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
},
u'orm.target_image_file': {
'Meta': {'object_name': 'Target_Image_File'},
'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
'file_size': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
},
u'orm.target_installed_package': {
'Meta': {'object_name': 'Target_Installed_Package'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
},
u'orm.task': {
'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'build_recipe'", 'to': u"orm['orm.Recipe']"}),
'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
},
u'orm.task_dependency': {
'Meta': {'object_name': 'Task_Dependency'},
'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
},
u'orm.variable': {
'Meta': {'object_name': 'Variable'},
'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
u'orm.variablehistory': {
'Meta': {'object_name': 'VariableHistory'},
'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
}
}
complete_apps = ['orm']
| gpl-2.0 | -3,423,948,738,375,569,400 | 72.831111 | 162 | 0.553937 | false |
BehavioralInsightsTeam/edx-platform | common/djangoapps/entitlements/admin.py | 13 | 3334 | """Admin forms for Course Entitlements"""
from django import forms
from django.contrib import admin
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import modulestore
from .models import CourseEntitlement, CourseEntitlementPolicy, CourseEntitlementSupportDetail
@admin.register(CourseEntitlement)
class CourseEntitlementAdmin(admin.ModelAdmin):
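    """Admin configuration for listing and searching CourseEntitlement records."""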
list_display = ('user',
'uuid',
'course_uuid',
'created',
'modified',
'expired_at',
'mode',
'enrollment_course_run',
'order_number')
raw_id_fields = ('enrollment_course_run', 'user',)
search_fields = ('user__username', 'uuid', 'course_uuid', 'mode', 'order_number')
class CourseEntitlementSupportDetailForm(forms.ModelForm):
"""Form for adding entitlement support details, exists mostly for testing purposes"""
def __init__(self, *args, **kwargs):
super(CourseEntitlementSupportDetailForm, self).__init__(*args, **kwargs)
if self.data.get('unenrolled_run'):
try:
self.data['unenrolled_run'] = CourseKey.from_string(self.data['unenrolled_run'])
except InvalidKeyError:
raise forms.ValidationError("No valid CourseKey for id {}!".format(self.data['unenrolled_run']))
def clean_course_id(self):
"""Cleans course id and attempts to make course key from string version of key"""
course_id = self.cleaned_data['unenrolled_run']
try:
course_key = CourseKey.from_string(course_id)
except InvalidKeyError:
raise forms.ValidationError("Cannot make a valid CourseKey from id {}!".format(course_id))
if not modulestore().has_course(course_key):
raise forms.ValidationError("Cannot find course with id {} in the modulestore".format(course_id))
return course_key
class Meta:
fields = '__all__'
model = CourseEntitlementSupportDetail
@admin.register(CourseEntitlementSupportDetail)
class CourseEntitlementSupportDetailAdmin(admin.ModelAdmin):
"""
Registration of CourseEntitlementSupportDetail for Django Admin
"""
list_display = ('entitlement',
'support_user',
'comments',
'unenrolled_run')
raw_id_fields = ('unenrolled_run', 'support_user',)
form = CourseEntitlementSupportDetailForm
class CourseEntitlementPolicyForm(forms.ModelForm):
""" Form for creating custom course entitlement policies. """
def __init__(self, *args, **kwargs):
super(CourseEntitlementPolicyForm, self).__init__(*args, **kwargs)
self.fields['site'].required = False
self.fields['mode'].required = False
class Meta:
fields = '__all__'
model = CourseEntitlementPolicy
@admin.register(CourseEntitlementPolicy)
class CourseEntitlementPolicyAdmin(admin.ModelAdmin):
"""
Registration of CourseEntitlementPolicy for Django Admin
"""
list_display = ('expiration_period',
'refund_period',
'regain_period',
'mode',
'site')
form = CourseEntitlementPolicyForm
| agpl-3.0 | 7,709,993,555,587,085,000 | 35.637363 | 112 | 0.632573 | false |
kidaa30/spacewalk | backend/server/rhnServer/search_notify.py | 4 | 1394 | #
# Copyright (c) 2008--2015 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
# Sends notification to search-server that it should update server index
#
import xmlrpclib
from spacewalk.common.rhnLog import log_error
class SearchNotify:
def __init__(self, host="127.0.0.1", port="2828"):
self.addr = "http://%s:%s" % (host, port)
def notify(self, indexName="server"):
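        """Ask the search server over XML-RPC to rebuild the given index; returns False if the call fails."""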
try:
client = xmlrpclib.ServerProxy(self.addr)
result = client.admin.updateIndex(indexName)
except Exception, e:
log_error("Failed to notify search service located at %s to update %s indexes"
% (self.addr, indexName), e)
return False
return result
if __name__ == "__main__":
search = SearchNotify()
result = search.notify()
print "search.notify() = %s" % (result)
| gpl-2.0 | -5,595,695,197,977,589,000 | 33.85 | 90 | 0.674319 | false |
mlperf/training_results_v0.6 | Google/benchmarks/transformer/implementations/tpu-v3-2048-transformer/dataset_preproc/data_generators/wikisum/get_references_web.py | 7 | 3008 | # coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=line-too-long
r"""Fetch reference URLs from all groups for a single shard id.
Because of an SSL memory leak in Python 3.5, fetching too many URLs in the same
Python process will OOM. This script wraps get_references_web_single_group.py
and calls it through subprocess for each group in the shard, where each group is
~5k URLs.
Launch with parallel_launch.py
Each job should finish in ~5 hours with the settings below.
GCS_BUCKET=gs://my-bucket
python parallel_launch.py \
--num_instances=1000 \
--cpu=4 \
--mem=4 \
--name=get-refs-web \
--code_dir=./ \
--log_dir=$GCS_BUCKET/logs \
--setup_command="pip3 install aiohttp cchardet aiodns bs4 -q --user" \
--command_prefix="python3 wikisum/get_references_web.py --out_dir=$GCS_BUCKET/wiki_references --shard_id"
"""
# pylint: enable=line-too-long
import math
import os
import subprocess as sp
from tensor2tensor.data_generators.wikisum import get_references_web_single_group as fetch
from tensor2tensor.data_generators.wikisum import utils
import tensorflow as tf
flags = tf.flags
FLAGS = flags.FLAGS
flags.DEFINE_string(
"command",
"python3 -m "
"tensor2tensor.data_generators.wikisum.get_references_web_single_group",
"Command to run get_references_web_single_group, without flags.")
def main(_):
shard_urls = fetch.get_urls_for_shard(FLAGS.urls_dir, FLAGS.shard_id)
num_groups = int(math.ceil(len(shard_urls) / fetch.URLS_PER_CLIENT))
tf.logging.info("Launching get_references_web_single_group sequentially for "
"%d groups in shard %d. Total URLs: %d",
num_groups, FLAGS.shard_id, len(shard_urls))
command_prefix = FLAGS.command.split() + [
"--urls_dir=%s" % FLAGS.urls_dir,
"--shard_id=%d" % FLAGS.shard_id,
"--debug_num_urls=%d" % FLAGS.debug_num_urls,
]
with utils.timing("all_groups_fetch"):
for i in range(num_groups):
command = list(command_prefix)
out_dir = os.path.join(FLAGS.out_dir, "process_%d" % i)
command.append("--out_dir=%s" % out_dir)
command.append("--group_id=%d" % i)
try:
# Even on 1 CPU, each group should finish within an hour.
sp.check_call(command, timeout=60*60)
except sp.TimeoutExpired:
tf.logging.error("Group %d timed out", i)
if __name__ == "__main__":
tf.logging.set_verbosity(tf.logging.INFO)
tf.app.run()
| apache-2.0 | 9,147,517,682,418,317,000 | 34.388235 | 109 | 0.694481 | false |
Juniper/euca2ools | euca2ools/commands/ec2/revoke.py | 1 | 1696 | # Copyright 2009-2013 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from euca2ools.commands.ec2.modgroup import ModifySecurityGroupRequest
class Revoke(ModifySecurityGroupRequest):
DESCRIPTION = 'Remove a rule from a security group'
@property
def action(self):
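        # Egress rules are revoked with a different EC2 action than the default ingress form.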
if self.args['egress']:
return 'RevokeSecurityGroupEgress'
else:
return 'RevokeSecurityGroupIngress'
| bsd-2-clause | -4,472,226,785,310,921,700 | 44.837838 | 74 | 0.769458 | false |
adeboisanger/thumbor | tests/filters/test_watermark.py | 5 | 2616 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com [email protected]
from preggy import expect
from tests.base import FilterTestCase
class WatermarkFilterTestCase(FilterTestCase):
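    """Each test applies the watermark filter to source100.jpg and compares the result against a stored fixture image using SSIM (structural similarity)."""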
def test_watermark_filter_centered(self):
image = self.get_filtered('source100.jpg', 'thumbor.filters.watermark', 'watermark(watermark.png,center,center,60)')
expected = self.get_fixture('watermarkCenter.jpg')
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.99)
def test_watermark_filter_centered_x(self):
image = self.get_filtered('source100.jpg', 'thumbor.filters.watermark', 'watermark(watermark.png,center,40,20)')
expected = self.get_fixture('watermarkCenterX.jpg')
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.99)
def test_watermark_filter_centered_y(self):
image = self.get_filtered('source100.jpg', 'thumbor.filters.watermark', 'watermark(watermark.png,80,center,50)')
expected = self.get_fixture('watermarkCenterY.jpg')
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.99)
def test_watermark_filter_repeated(self):
image = self.get_filtered('source100.jpg', 'thumbor.filters.watermark', 'watermark(watermark.png,repeat,repeat,70)')
expected = self.get_fixture('watermarkRepeat.jpg')
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.92)
def test_watermark_filter_repeated_x(self):
image = self.get_filtered('source100.jpg', 'thumbor.filters.watermark', 'watermark(watermark.png,repeat,center,70)')
expected = self.get_fixture('watermarkRepeatX.jpg')
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.94)
def test_watermark_filter_repeated_y(self):
image = self.get_filtered('source100.jpg', 'thumbor.filters.watermark', 'watermark(watermark.png,30,repeat,30)')
expected = self.get_fixture('watermarkRepeatY.jpg')
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.94)
def test_watermark_filter_simple(self):
image = self.get_filtered('source100.jpg', 'thumbor.filters.watermark', 'watermark(watermark.png,30,-50,60)')
expected = self.get_fixture('watermarkSimple.jpg')
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.99)
| mit | 1,533,809,273,229,052,200 | 44.894737 | 124 | 0.689602 | false |
jjgoings/McMurchie-Davidson | mmd/realtime.py | 1 | 7265 | from __future__ import division
from __future__ import print_function
import numpy as np
from scipy.linalg import expm
class RealTime(object):
"""Class for real-time routines"""
def __init__(self,mol,numsteps=1000,stepsize=0.1,field=0.0001,pulse=None):
self.mol = mol
self.field = field
self.stepsize = stepsize
self.numSteps = numsteps
self.time = np.arange(0,self.numSteps)*self.stepsize
if pulse:
self.pulse = pulse
else:
# zero pulse envelope
self.pulse = lambda t: 0.0
self.reset()
def reset(self):
"""Reset all time-dependent property arrays to empty, will also
re-do the SCF in order to set the reference back to ground state.
This will likely need to be changed in the future.
"""
self.mol.RHF(doPrint=False)
self.dipole = []
self.angmom = []
self.Energy = []
self.shape = []
def Magnus2(self,direction='x'):
"""Propagate in time using the second order explicit Magnus.
See: Blanes, Sergio, and Fernando Casas. A concise introduction
to geometric numerical integration. Vol. 23. CRC Press, 2016.
Magnus2 is Eq (4.61), page 128.
"""
self.reset()
self.mol.orthoDen()
self.mol.orthoFock()
h = -1j*self.stepsize
for idx,time in enumerate((self.time)):
if direction.lower() == 'x':
self.mol.computeDipole()
self.dipole.append(np.real(self.mol.mu[0]))
elif direction.lower() == 'y':
self.mol.computeDipole()
self.dipole.append(np.real(self.mol.mu[1]))
elif direction.lower() == 'z':
self.mol.computeDipole()
self.dipole.append(np.real(self.mol.mu[2]))
# record pulse envelope for later plotting, etc.
self.shape.append(self.pulse(time))
curDen = np.copy(self.mol.PO)
self.addField(time + 0.0*self.stepsize,direction=direction)
k1 = h*self.mol.FO
U = expm(k1)
self.mol.PO = np.dot(U,np.dot(curDen,self.mol.adj(U)))
self.mol.updateFock()
self.addField(time + 1.0*self.stepsize,direction=direction)
L = 0.5*(k1 + h*self.mol.FO)
U = expm(L)
self.mol.PO = np.dot(U,np.dot(curDen,self.mol.adj(U)))
self.mol.updateFock()
# density and Fock are done updating, wrap things up
self.mol.unOrthoFock()
self.mol.unOrthoDen()
self.mol.computeEnergy()
self.Energy.append(np.real(self.mol.energy))
def Magnus4(self,direction='x'):
"""Propagate in time using the fourth order explicit Magnus.
See: Blanes, Sergio, and Fernando Casas. A concise introduction
to geometric numerical integration. Vol. 23. CRC Press, 2016.
Magnus4 is Eq (4.62), page 128.
"""
self.reset()
self.mol.orthoDen()
self.mol.orthoFock()
h = -1j*self.stepsize
for idx,time in enumerate((self.time)):
if direction.lower() == 'x':
self.mol.computeDipole()
self.dipole.append(np.real(self.mol.mu[0]))
elif direction.lower() == 'y':
self.mol.computeDipole()
self.dipole.append(np.real(self.mol.mu[1]))
elif direction.lower() == 'z':
self.mol.computeDipole()
self.dipole.append(np.real(self.mol.mu[2]))
# record pulse envelope for later plotting, etc.
self.shape.append(self.pulse(time))
curDen = np.copy(self.mol.PO)
self.addField(time + 0.0*self.stepsize,direction=direction)
k1 = h*self.mol.FO
Q1 = k1
U = expm(0.5*Q1)
self.mol.PO = np.dot(U,np.dot(curDen,self.mol.adj(U)))
self.mol.updateFock()
self.addField(time + 0.5*self.stepsize,direction=direction)
k2 = h*self.mol.FO
Q2 = k2 - k1
U = expm(0.5*Q1 + 0.25*Q2)
self.mol.PO = np.dot(U,np.dot(curDen,self.mol.adj(U)))
self.mol.updateFock()
self.addField(time + 0.5*self.stepsize,direction=direction)
k3 = h*self.mol.FO
Q3 = k3 - k2
U = expm(Q1 + Q2)
self.mol.PO = np.dot(U,np.dot(curDen,self.mol.adj(U)))
self.mol.updateFock()
self.addField(time + 1.0*self.stepsize,direction=direction)
k4 = h*self.mol.FO
Q4 = k4 - 2*k2 + k1
L = 0.5*Q1 + 0.25*Q2 + (1/3.)*Q3 - (1/24.)*Q4
L += -(1/48.)*self.mol.comm(Q1,Q2)
U = expm(L)
self.mol.PO = np.dot(U,np.dot(curDen,self.mol.adj(U)))
self.mol.updateFock()
self.addField(time + 0.5*self.stepsize,direction=direction)
k5 = h*self.mol.FO
Q5 = k5 - k2
L = Q1 + Q2 + (2/3.)*Q3 + (1/6.)*Q4 - (1/6.)*self.mol.comm(Q1,Q2)
U = expm(L)
self.mol.PO = np.dot(U,np.dot(curDen,self.mol.adj(U)))
self.mol.updateFock()
self.addField(time + 1.0*self.stepsize,direction=direction)
k6 = h*self.mol.FO
Q6 = k6 -2*k2 + k1
L = Q1 + Q2 + (2/3.)*Q5 + (1/6.)*Q6
L += -(1/6.)*self.mol.comm(Q1, (Q2 - Q3 + Q5 + 0.5*Q6))
U = expm(L)
self.mol.PO = np.dot(U,np.dot(curDen,self.mol.adj(U)))
self.mol.updateFock()
# density and Fock are done updating, wrap things up
self.mol.unOrthoFock()
self.mol.unOrthoDen()
self.mol.computeEnergy()
self.Energy.append(np.real(self.mol.energy))
def addField(self,time,direction='x'):
""" Add the electric dipole contribution to the Fock matrix,
and then orthogonalize the results. The envelope (shape) of
the interaction with the electric field (self.pulse) needs
to be set externally in a job, since the desired pulse is
specific to each type of realtime simulation.
self.pulse: function of time (t) that returns the envelope
amplitude at a given time.
Example:
def gaussian(t):
envelope = np.exp(-(t**2))
return envelope
rt = RealTime(molecule, pulse=gaussian, field=0.001)
The above example would set up a realtime simulations with
the external field to have the gaussian envelope defined above
scaled by field=0.001.
"""
shape = self.pulse(time)
if direction.lower() == 'x':
self.mol.F += self.field*shape*(self.mol.M[0])
elif direction.lower() == 'y':
self.mol.F += self.field*shape*(self.mol.M[1])
elif direction.lower() == 'z':
self.mol.F += self.field*shape*(self.mol.M[2])
self.mol.orthoFock()
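
# ----------------------------------------------------------------------
# Illustrative usage sketch. It assumes a molecule object built elsewhere
# in this package (e.g. a Molecule class exposing the RHF/Fock/density
# attributes RealTime relies on); the module path, constructor signature,
# and geometry placeholder below are assumptions, not guaranteed APIs.
#
#     from mmd.molecule import Molecule
#     from mmd.realtime import RealTime
#     import numpy as np
#
#     mol = Molecule(geometry=water_xyz_string, basis='sto-3g')
#
#     # delta-kick envelope: the field is only "on" for the very first step
#     kick = lambda t: 1.0 if abs(t) < 1e-12 else 0.0
#
#     rt = RealTime(mol, numsteps=1000, stepsize=0.05, field=0.0001, pulse=kick)
#     rt.Magnus2(direction='z')            # or rt.Magnus4(direction='z')
#     dipole_z = np.asarray(rt.dipole)     # time-dependent dipole, ready for an FFT
# ----------------------------------------------------------------------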
| bsd-3-clause | -2,012,891,293,625,211,100 | 38.05914 | 78 | 0.526497 | false |
tedder/ansible | lib/ansible/modules/network/nxos/nxos_facts.py | 9 | 33660 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: nxos_facts
extends_documentation_fragment: nxos
version_added: "2.1"
short_description: Gets facts about NX-OS switches
description:
- Collects facts from Cisco Nexus devices running the NX-OS operating
system. Fact collection is supported over both Cli and Nxapi
transports. This module prepends all of the base network fact keys
with C(ansible_net_<fact>). The facts module will always collect a
base set of facts from the device and can enable or disable
collection of additional facts.
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
options:
gather_subset:
description:
- When supplied, this argument will restrict the facts collected
to a given subset. Possible values for this argument include
all, hardware, config, legacy, and interfaces. Can specify a
list of values to include a larger subset. Values can also be used
        with an initial C(!) to specify that a specific subset should
not be collected.
required: false
default: '!config'
version_added: "2.2"
"""
EXAMPLES = """
- nxos_facts:
gather_subset: all
# Collect only the config and default facts
- nxos_facts:
gather_subset:
- config
# Do not collect hardware facts
- nxos_facts:
gather_subset:
- "!hardware"
"""
RETURN = """
ansible_net_gather_subset:
description: The list of fact subsets collected from the device
returned: always
type: list
# default
ansible_net_model:
description: The model name returned from the device
returned: always
type: str
ansible_net_serialnum:
description: The serial number of the remote device
returned: always
type: str
ansible_net_version:
description: The operating system version running on the remote device
returned: always
type: str
ansible_net_hostname:
description: The configured hostname of the device
returned: always
type: str
ansible_net_image:
description: The image file the device is running
returned: always
type: str
# hardware
ansible_net_filesystems:
description: All file system names available on the device
returned: when hardware is configured
type: list
ansible_net_memfree_mb:
description: The available free memory on the remote device in Mb
returned: when hardware is configured
type: int
ansible_net_memtotal_mb:
description: The total memory on the remote device in Mb
returned: when hardware is configured
type: int
# config
ansible_net_config:
description: The current active config from the device
returned: when config is configured
type: str
# interfaces
ansible_net_all_ipv4_addresses:
description: All IPv4 addresses configured on the device
returned: when interfaces is configured
type: list
ansible_net_all_ipv6_addresses:
description: All IPv6 addresses configured on the device
returned: when interfaces is configured
type: list
ansible_net_interfaces:
description: A hash of all interfaces running on the system
returned: when interfaces is configured
type: dict
ansible_net_neighbors:
description:
- The list of LLDP and CDP neighbors from the device. If both,
CDP and LLDP neighbor data is present on one port, CDP is preferred.
returned: when interfaces is configured
type: dict
# legacy (pre Ansible 2.2)
fan_info:
description: A hash of facts about fans in the remote device
returned: when legacy is configured
type: dict
hostname:
description: The configured hostname of the remote device
returned: when legacy is configured
type: dict
interfaces_list:
description: The list of interface names on the remote device
returned: when legacy is configured
type: dict
kickstart:
description: The software version used to boot the system
returned: when legacy is configured
type: str
module:
description: A hash of facts about the modules in a remote device
returned: when legacy is configured
type: dict
platform:
description: The hardware platform reported by the remote device
returned: when legacy is configured
type: str
power_supply_info:
description: A hash of facts about the power supplies in the remote device
returned: when legacy is configured
type: str
vlan_list:
description: The list of VLAN IDs configured on the remote device
returned: when legacy is configured
type: list
"""
import re
from ansible.module_utils.network.nxos.nxos import run_commands, get_config
from ansible.module_utils.network.nxos.nxos import get_capabilities, get_interface_type
from ansible.module_utils.network.nxos.nxos import nxos_argument_spec, check_args
from ansible.module_utils.network.nxos.nxos import normalize_interface
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import ConnectionError
from ansible.module_utils.six import string_types, iteritems
g_config = None
class FactsBase(object):
def __init__(self, module):
self.module = module
self.warnings = list()
self.facts = dict()
def populate(self):
pass
def run(self, command, output='text'):
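        """Run a single show command on the device and return its output (text or JSON); on failure a warning is recorded and None is returned."""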
command_string = command
command = {
'command': command,
'output': output
}
resp = run_commands(self.module, [command], check_rc='retry_json')
try:
return resp[0]
except IndexError:
self.warnings.append('command %s failed, facts for this command will not be populated' % command_string)
return None
def get_config(self):
global g_config
if not g_config:
g_config = get_config(self.module)
return g_config
def transform_dict(self, data, keymap):
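        """Build a dict of facts from device output, renaming each key present in data according to the (source_key, fact_name) pairs in keymap."""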
transform = dict()
for key, fact in keymap:
if key in data:
transform[fact] = data[key]
return transform
def transform_iterable(self, iterable, keymap):
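        """Yield transform_dict(item, keymap) for every item in the iterable."""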
for item in iterable:
yield self.transform_dict(item, keymap)
class Default(FactsBase):
VERSION_MAP_7K = frozenset([
('sys_ver_str', 'version'),
('proc_board_id', 'serialnum'),
('chassis_id', 'model'),
('isan_file_name', 'image'),
('host_name', 'hostname')
])
VERSION_MAP = frozenset([
('kickstart_ver_str', 'version'),
('proc_board_id', 'serialnum'),
('chassis_id', 'model'),
('kick_file_name', 'image'),
('host_name', 'hostname')
])
def populate(self):
data = None
data = self.run('show version', output='json')
if data:
if isinstance(data, dict):
if data.get('sys_ver_str'):
self.facts.update(self.transform_dict(data, self.VERSION_MAP_7K))
else:
self.facts.update(self.transform_dict(data, self.VERSION_MAP))
else:
self.facts['version'] = self.parse_version(data)
self.facts['serialnum'] = self.parse_serialnum(data)
self.facts['model'] = self.parse_model(data)
self.facts['image'] = self.parse_image(data)
self.facts['hostname'] = self.parse_hostname(data)
data = self.run('show license host-id')
if data:
self.facts['license_hostid'] = self.parse_license_hostid(data)
def parse_version(self, data):
match = re.search(r'\s+system:\s+version\s*(\S+)', data, re.M)
if match:
return match.group(1)
else:
match = re.search(r'\s+kickstart:\s+version\s*(\S+)', data, re.M)
if match:
return match.group(1)
def parse_serialnum(self, data):
match = re.search(r'Processor Board ID\s*(\S+)', data, re.M)
if match:
return match.group(1)
def parse_model(self, data):
match = re.search(r'Hardware\n\s+cisco\s*(\S+\s+\S+)', data, re.M)
if match:
return match.group(1)
def parse_image(self, data):
match = re.search(r'\s+system image file is:\s*(\S+)', data, re.M)
if match:
return match.group(1)
else:
match = re.search(r'\s+kickstart image file is:\s*(\S+)', data, re.M)
if match:
return match.group(1)
def parse_hostname(self, data):
match = re.search(r'\s+Device name:\s*(\S+)', data, re.M)
if match:
return match.group(1)
def parse_license_hostid(self, data):
match = re.search(r'License hostid: VDH=(.+)$', data, re.M)
if match:
return match.group(1)
class Config(FactsBase):
def populate(self):
super(Config, self).populate()
self.facts['config'] = self.get_config()
class Features(FactsBase):
def populate(self):
super(Features, self).populate()
data = self.get_config()
if data:
features = []
for line in data.splitlines():
if line.startswith('feature'):
features.append(line.replace('feature', '').strip())
self.facts['features_enabled'] = features
class Hardware(FactsBase):
def populate(self):
data = self.run('dir')
if data:
self.facts['filesystems'] = self.parse_filesystems(data)
data = None
data = self.run('show system resources', output='json')
if data:
if isinstance(data, dict):
self.facts['memtotal_mb'] = int(data['memory_usage_total']) / 1024
self.facts['memfree_mb'] = int(data['memory_usage_free']) / 1024
else:
self.facts['memtotal_mb'] = self.parse_memtotal_mb(data)
self.facts['memfree_mb'] = self.parse_memfree_mb(data)
def parse_filesystems(self, data):
return re.findall(r'^Usage for (\S+)//', data, re.M)
def parse_memtotal_mb(self, data):
match = re.search(r'(\S+)K(\s+|)total', data, re.M)
if match:
memtotal = match.group(1)
return int(memtotal) / 1024
def parse_memfree_mb(self, data):
match = re.search(r'(\S+)K(\s+|)free', data, re.M)
if match:
memfree = match.group(1)
return int(memfree) / 1024
class Interfaces(FactsBase):
INTERFACE_MAP = frozenset([
('state', 'state'),
('desc', 'description'),
('eth_bw', 'bandwidth'),
('eth_duplex', 'duplex'),
('eth_speed', 'speed'),
('eth_mode', 'mode'),
('eth_hw_addr', 'macaddress'),
('eth_mtu', 'mtu'),
('eth_hw_desc', 'type')
])
INTERFACE_SVI_MAP = frozenset([
('svi_line_proto', 'state'),
('svi_bw', 'bandwidth'),
('svi_mac', 'macaddress'),
('svi_mtu', 'mtu'),
('type', 'type')
])
INTERFACE_IPV4_MAP = frozenset([
('eth_ip_addr', 'address'),
('eth_ip_mask', 'masklen')
])
INTERFACE_SVI_IPV4_MAP = frozenset([
('svi_ip_addr', 'address'),
('svi_ip_mask', 'masklen')
])
INTERFACE_IPV6_MAP = frozenset([
('addr', 'address'),
('prefix', 'subnet')
])
def ipv6_structure_op_supported(self):
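        """Return False on NX-OS releases (I2, F1, A8 trains) whose structured 'show ipv6 interface' output is not usable."""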
data = get_capabilities(self.module)
if data:
nxos_os_version = data['device_info']['network_os_version']
unsupported_versions = ['I2', 'F1', 'A8']
for ver in unsupported_versions:
if ver in nxos_os_version:
return False
return True
def populate(self):
self.facts['all_ipv4_addresses'] = list()
self.facts['all_ipv6_addresses'] = list()
self.facts['neighbors'] = {}
data = None
data = self.run('show interface', output='json')
if data:
if isinstance(data, dict):
self.facts['interfaces'] = self.populate_structured_interfaces(data)
else:
interfaces = self.parse_interfaces(data)
self.facts['interfaces'] = self.populate_interfaces(interfaces)
if self.ipv6_structure_op_supported():
data = self.run('show ipv6 interface', output='json')
else:
data = None
if data:
if isinstance(data, dict):
self.populate_structured_ipv6_interfaces(data)
else:
interfaces = self.parse_interfaces(data)
self.populate_ipv6_interfaces(interfaces)
data = self.run('show lldp neighbors', output='json')
if data:
if isinstance(data, dict):
self.facts['neighbors'].update(self.populate_structured_neighbors_lldp(data))
else:
self.facts['neighbors'].update(self.populate_neighbors(data))
data = self.run('show cdp neighbors detail', output='json')
if data:
if isinstance(data, dict):
self.facts['neighbors'].update(self.populate_structured_neighbors_cdp(data))
else:
self.facts['neighbors'].update(self.populate_neighbors_cdp(data))
self.facts['neighbors'].pop(None, None) # Remove null key
def populate_structured_interfaces(self, data):
interfaces = dict()
for item in data['TABLE_interface']['ROW_interface']:
name = item['interface']
intf = dict()
if 'type' in item:
intf.update(self.transform_dict(item, self.INTERFACE_SVI_MAP))
else:
intf.update(self.transform_dict(item, self.INTERFACE_MAP))
if 'eth_ip_addr' in item:
intf['ipv4'] = self.transform_dict(item, self.INTERFACE_IPV4_MAP)
self.facts['all_ipv4_addresses'].append(item['eth_ip_addr'])
if 'svi_ip_addr' in item:
intf['ipv4'] = self.transform_dict(item, self.INTERFACE_SVI_IPV4_MAP)
self.facts['all_ipv4_addresses'].append(item['svi_ip_addr'])
interfaces[name] = intf
return interfaces
def populate_structured_ipv6_interfaces(self, data):
try:
data = data['TABLE_intf']
if data:
if isinstance(data, dict):
data = [data]
for item in data:
name = item['ROW_intf']['intf-name']
intf = self.facts['interfaces'][name]
intf['ipv6'] = self.transform_dict(item, self.INTERFACE_IPV6_MAP)
try:
addr = item['ROW_intf']['addr']
except KeyError:
addr = item['ROW_intf']['TABLE_addr']['ROW_addr']['addr']
self.facts['all_ipv6_addresses'].append(addr)
else:
return ""
except TypeError:
return ""
def populate_structured_neighbors_lldp(self, data):
objects = dict()
data = data['TABLE_nbor']['ROW_nbor']
if isinstance(data, dict):
data = [data]
for item in data:
local_intf = normalize_interface(item['l_port_id'])
objects[local_intf] = list()
nbor = dict()
nbor['port'] = item['port_id']
nbor['host'] = nbor['sysname'] = item['chassis_id']
objects[local_intf].append(nbor)
return objects
def populate_structured_neighbors_cdp(self, data):
objects = dict()
data = data['TABLE_cdp_neighbor_detail_info']['ROW_cdp_neighbor_detail_info']
if isinstance(data, dict):
data = [data]
for item in data:
local_intf = item['intf_id']
objects[local_intf] = list()
nbor = dict()
nbor['port'] = item['port_id']
nbor['host'] = nbor['sysname'] = item['device_id']
objects[local_intf].append(nbor)
return objects
def parse_interfaces(self, data):
parsed = dict()
key = ''
for line in data.split('\n'):
if len(line) == 0:
continue
elif line.startswith('admin') or line[0] == ' ':
parsed[key] += '\n%s' % line
else:
match = re.match(r'^(\S+)', line)
if match:
key = match.group(1)
if not key.startswith('admin') or not key.startswith('IPv6 Interface'):
parsed[key] = line
return parsed
def populate_interfaces(self, interfaces):
facts = dict()
for key, value in iteritems(interfaces):
intf = dict()
if get_interface_type(key) == 'svi':
intf['state'] = self.parse_state(key, value, intf_type='svi')
intf['macaddress'] = self.parse_macaddress(value, intf_type='svi')
intf['mtu'] = self.parse_mtu(value, intf_type='svi')
intf['bandwidth'] = self.parse_bandwidth(value, intf_type='svi')
intf['type'] = self.parse_type(value, intf_type='svi')
if 'Internet Address' in value:
intf['ipv4'] = self.parse_ipv4_address(value, intf_type='svi')
facts[key] = intf
else:
intf['state'] = self.parse_state(key, value)
intf['description'] = self.parse_description(value)
intf['macaddress'] = self.parse_macaddress(value)
intf['mode'] = self.parse_mode(value)
intf['mtu'] = self.parse_mtu(value)
intf['bandwidth'] = self.parse_bandwidth(value)
intf['duplex'] = self.parse_duplex(value)
intf['speed'] = self.parse_speed(value)
intf['type'] = self.parse_type(value)
if 'Internet Address' in value:
intf['ipv4'] = self.parse_ipv4_address(value)
facts[key] = intf
return facts
def parse_state(self, key, value, intf_type='ethernet'):
match = None
if intf_type == 'svi':
match = re.search(r'line protocol is\s*(\S+)', value, re.M)
else:
match = re.search(r'%s is\s*(\S+)' % key, value, re.M)
if match:
return match.group(1)
def parse_macaddress(self, value, intf_type='ethernet'):
match = None
if intf_type == 'svi':
match = re.search(r'address is\s*(\S+)', value, re.M)
else:
match = re.search(r'address:\s*(\S+)', value, re.M)
if match:
return match.group(1)
def parse_mtu(self, value, intf_type='ethernet'):
match = re.search(r'MTU\s*(\S+)', value, re.M)
if match:
return match.group(1)
def parse_bandwidth(self, value, intf_type='ethernet'):
match = re.search(r'BW\s*(\S+)', value, re.M)
if match:
return match.group(1)
def parse_type(self, value, intf_type='ethernet'):
match = None
if intf_type == 'svi':
match = re.search(r'Hardware is\s*(\S+)', value, re.M)
else:
match = re.search(r'Hardware:\s*(.+),', value, re.M)
if match:
return match.group(1)
def parse_description(self, value, intf_type='ethernet'):
match = re.search(r'Description: (.+)$', value, re.M)
if match:
return match.group(1)
def parse_mode(self, value, intf_type='ethernet'):
match = re.search(r'Port mode is (\S+)', value, re.M)
if match:
return match.group(1)
def parse_duplex(self, value, intf_type='ethernet'):
match = re.search(r'(\S+)-duplex', value, re.M)
if match:
return match.group(1)
def parse_speed(self, value, intf_type='ethernet'):
match = re.search(r'duplex, (.+)$', value, re.M)
if match:
return match.group(1)
def parse_ipv4_address(self, value, intf_type='ethernet'):
ipv4 = {}
match = re.search(r'Internet Address is (.+)$', value, re.M)
if match:
address = match.group(1)
addr = address.split('/')[0]
ipv4['address'] = address.split('/')[0]
ipv4['masklen'] = address.split('/')[1]
self.facts['all_ipv4_addresses'].append(addr)
return ipv4
def populate_neighbors(self, data):
objects = dict()
# if there are no neighbors the show command returns
# ERROR: No neighbour information
if data.startswith('ERROR'):
return dict()
regex = re.compile(r'(\S+)\s+(\S+)\s+\d+\s+\w+\s+(\S+)')
for item in data.split('\n')[4:-1]:
match = regex.match(item)
if match:
nbor = dict()
nbor['host'] = nbor['sysname'] = match.group(1)
nbor['port'] = match.group(3)
local_intf = normalize_interface(match.group(2))
if local_intf not in objects:
objects[local_intf] = []
objects[local_intf].append(nbor)
return objects
def populate_neighbors_cdp(self, data):
facts = dict()
for item in data.split('----------------------------------------'):
if item == '':
continue
local_intf = self.parse_lldp_intf(item)
if local_intf not in facts:
facts[local_intf] = list()
fact = dict()
fact['port'] = self.parse_lldp_port(item)
fact['sysname'] = self.parse_lldp_sysname(item)
facts[local_intf].append(fact)
return facts
def parse_lldp_intf(self, data):
match = re.search(r'Interface:\s*(\S+)', data, re.M)
if match:
return match.group(1).strip(',')
def parse_lldp_port(self, data):
match = re.search(r'Port ID \(outgoing port\):\s*(\S+)', data, re.M)
if match:
return match.group(1)
def parse_lldp_sysname(self, data):
match = re.search(r'Device ID:(.+)$', data, re.M)
if match:
return match.group(1)
def populate_ipv6_interfaces(self, interfaces):
facts = dict()
for key, value in iteritems(interfaces):
intf = dict()
intf['ipv6'] = self.parse_ipv6_address(value)
facts[key] = intf
def parse_ipv6_address(self, value):
ipv6 = {}
match_addr = re.search(r'IPv6 address:\s*(\S+)', value, re.M)
if match_addr:
addr = match_addr.group(1)
ipv6['address'] = addr
self.facts['all_ipv6_addresses'].append(addr)
match_subnet = re.search(r'IPv6 subnet:\s*(\S+)', value, re.M)
if match_subnet:
ipv6['subnet'] = match_subnet.group(1)
return ipv6
class Legacy(FactsBase):
# facts from nxos_facts 2.1
VERSION_MAP = frozenset([
('host_name', '_hostname'),
('kickstart_ver_str', '_os'),
('chassis_id', '_platform')
])
MODULE_MAP = frozenset([
('model', 'model'),
('modtype', 'type'),
('ports', 'ports'),
('status', 'status')
])
FAN_MAP = frozenset([
('fanname', 'name'),
('fanmodel', 'model'),
('fanhwver', 'hw_ver'),
('fandir', 'direction'),
('fanstatus', 'status')
])
POWERSUP_MAP = frozenset([
('psmodel', 'model'),
('psnum', 'number'),
('ps_status', 'status'),
('ps_status_3k', 'status'),
('actual_out', 'actual_output'),
('actual_in', 'actual_in'),
('total_capa', 'total_capacity'),
('input_type', 'input_type'),
('watts', 'watts'),
('amps', 'amps')
])
def populate(self):
data = None
data = self.run('show version', output='json')
if data:
if isinstance(data, dict):
self.facts.update(self.transform_dict(data, self.VERSION_MAP))
else:
self.facts['_hostname'] = self.parse_hostname(data)
self.facts['_os'] = self.parse_os(data)
self.facts['_platform'] = self.parse_platform(data)
data = self.run('show interface', output='json')
if data:
if isinstance(data, dict):
self.facts['_interfaces_list'] = self.parse_structured_interfaces(data)
else:
self.facts['_interfaces_list'] = self.parse_interfaces(data)
data = self.run('show vlan brief', output='json')
if data:
if isinstance(data, dict):
self.facts['_vlan_list'] = self.parse_structured_vlans(data)
else:
self.facts['_vlan_list'] = self.parse_vlans(data)
data = self.run('show module', output='json')
if data:
if isinstance(data, dict):
self.facts['_module'] = self.parse_structured_module(data)
else:
self.facts['_module'] = self.parse_module(data)
data = self.run('show environment fan', output='json')
if data:
if isinstance(data, dict):
self.facts['_fan_info'] = self.parse_structured_fan_info(data)
else:
self.facts['_fan_info'] = self.parse_fan_info(data)
data = self.run('show environment power', output='json')
if data:
if isinstance(data, dict):
self.facts['_power_supply_info'] = self.parse_structured_power_supply_info(data)
else:
self.facts['_power_supply_info'] = self.parse_power_supply_info(data)
def parse_structured_interfaces(self, data):
objects = list()
for item in data['TABLE_interface']['ROW_interface']:
objects.append(item['interface'])
return objects
def parse_structured_vlans(self, data):
objects = list()
data = data['TABLE_vlanbriefxbrief']['ROW_vlanbriefxbrief']
if isinstance(data, dict):
objects.append(data['vlanshowbr-vlanid-utf'])
elif isinstance(data, list):
for item in data:
objects.append(item['vlanshowbr-vlanid-utf'])
return objects
def parse_structured_module(self, data):
data = data['TABLE_modinfo']['ROW_modinfo']
if isinstance(data, dict):
data = [data]
objects = list(self.transform_iterable(data, self.MODULE_MAP))
return objects
def parse_structured_fan_info(self, data):
objects = list()
if data.get('fandetails'):
data = data['fandetails']['TABLE_faninfo']['ROW_faninfo']
elif data.get('fandetails_3k'):
data = data['fandetails_3k']['TABLE_faninfo']['ROW_faninfo']
else:
return objects
objects = list(self.transform_iterable(data, self.FAN_MAP))
return objects
def parse_structured_power_supply_info(self, data):
if data.get('powersup').get('TABLE_psinfo_n3k'):
fact = data['powersup']['TABLE_psinfo_n3k']['ROW_psinfo_n3k']
else:
if isinstance(data['powersup']['TABLE_psinfo'], list):
fact = []
for i in data['powersup']['TABLE_psinfo']:
fact.append(i['ROW_psinfo'])
else:
fact = data['powersup']['TABLE_psinfo']['ROW_psinfo']
objects = list(self.transform_iterable(fact, self.POWERSUP_MAP))
return objects
def parse_hostname(self, data):
match = re.search(r'\s+Device name:\s+(\S+)', data, re.M)
if match:
return match.group(1)
def parse_os(self, data):
match = re.search(r'\s+system:\s+version\s*(\S+)', data, re.M)
if match:
return match.group(1)
else:
match = re.search(r'\s+kickstart:\s+version\s*(\S+)', data, re.M)
if match:
return match.group(1)
def parse_platform(self, data):
match = re.search(r'Hardware\n\s+cisco\s+(\S+\s+\S+)', data, re.M)
if match:
return match.group(1)
def parse_interfaces(self, data):
objects = list()
for line in data.split('\n'):
if len(line) == 0:
continue
elif line.startswith('admin') or line[0] == ' ':
continue
else:
match = re.match(r'^(\S+)', line)
if match:
intf = match.group(1)
if get_interface_type(intf) != 'unknown':
objects.append(intf)
return objects
def parse_vlans(self, data):
objects = list()
for line in data.splitlines():
if line == '':
continue
if line[0].isdigit():
vlan = line.split()[0]
objects.append(vlan)
return objects
def parse_module(self, data):
objects = list()
for line in data.splitlines():
if line == '':
break
if line[0].isdigit():
obj = {}
match_port = re.search(r'\d\s*(\d*)', line, re.M)
if match_port:
obj['ports'] = match_port.group(1)
match = re.search(r'\d\s*\d*\s*(.+)$', line, re.M)
if match:
l = match.group(1).split(' ')
items = list()
for item in l:
if item == '':
continue
items.append(item.strip())
if items:
obj['type'] = items[0]
obj['model'] = items[1]
obj['status'] = items[2]
objects.append(obj)
return objects
def parse_fan_info(self, data):
objects = list()
for l in data.splitlines():
if '-----------------' in l or 'Status' in l:
continue
line = l.split()
if len(line) > 1:
obj = {}
obj['name'] = line[0]
obj['model'] = line[1]
obj['hw_ver'] = line[-2]
obj['status'] = line[-1]
objects.append(obj)
return objects
def parse_power_supply_info(self, data):
objects = list()
for l in data.splitlines():
if l == '':
break
if l[0].isdigit():
obj = {}
line = l.split()
obj['model'] = line[1]
obj['number'] = line[0]
obj['status'] = line[-1]
objects.append(obj)
return objects
FACT_SUBSETS = dict(
default=Default,
legacy=Legacy,
hardware=Hardware,
interfaces=Interfaces,
config=Config,
features=Features
)
VALID_SUBSETS = frozenset(FACT_SUBSETS.keys())
def main():
spec = dict(
gather_subset=dict(default=['!config'], type='list')
)
spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=spec, supports_check_mode=True)
warnings = list()
check_args(module, warnings)
gather_subset = module.params['gather_subset']
runable_subsets = set()
exclude_subsets = set()
for subset in gather_subset:
if subset == 'all':
runable_subsets.update(VALID_SUBSETS)
continue
if subset.startswith('!'):
subset = subset[1:]
if subset == 'all':
exclude_subsets.update(VALID_SUBSETS)
continue
exclude = True
else:
exclude = False
if subset not in VALID_SUBSETS:
module.fail_json(msg='Bad subset')
if exclude:
exclude_subsets.add(subset)
else:
runable_subsets.add(subset)
if not runable_subsets:
runable_subsets.update(VALID_SUBSETS)
runable_subsets.difference_update(exclude_subsets)
runable_subsets.add('default')
facts = dict()
facts['gather_subset'] = list(runable_subsets)
instances = list()
for key in runable_subsets:
instances.append(FACT_SUBSETS[key](module))
for inst in instances:
inst.populate()
facts.update(inst.facts)
warnings.extend(inst.warnings)
ansible_facts = dict()
for key, value in iteritems(facts):
# this is to maintain capability with nxos_facts 2.1
if key.startswith('_'):
ansible_facts[key[1:]] = value
else:
key = 'ansible_net_%s' % key
ansible_facts[key] = value
module.exit_json(ansible_facts=ansible_facts, warnings=warnings)
if __name__ == '__main__':
main()
| gpl-3.0 | -6,398,832,401,995,222,000 | 31.396535 | 116 | 0.555318 | false |
radiojam11/connectthedots | Devices/GatewayConnectedDevices/BtUSB_2_BtUART_Example/BtUSB_2_BtUART_Example.py | 16 | 5727 | '''
Copyright (c) Microsoft Open Technologies, Inc. All rights reserved.
The MIT License (MIT)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
Code to read data from a sensor which uses Bluetooth to transmit data, then augment and format as JSON to send via socket connection to a gateway.
Example of sending data to Microsoft Azure and analyzing with Azure Stream Analytics or Azure Machine Learning.
Real time output viewable at http://connectthedots.msopentech.com .
'''
import bluetooth
import sys
import socket
import time
import datetime
#SensorSubject = "distanceMeasurer" # determines how Azure website will chart the data
Org = "My organization";
Disp = "Bluetooth example" # will be the label for the curve on the chart
GUID = "nnnnnnnn-nnnn-nnnn-nnnn-nnnnnnnnnnnn" # ensures all the data from this sensor appears on the same chart. You can use the Tools/Create GUID in Visual Studio to create
Locn = "here";
Measure = "measure";
Units = "units";
HOST = '127.0.0.1'
PORT = 5000
BT_PORT = 1
BT_PACKET_LEN = 2
BT_DEV_ADDR = "20:14:10:10:14:17" # Please set needed MAC address
BT_SOCK_TIMEOUT = 10
CONNECT_RETRY_INTERVAL = 2
def connectSockets(bt, gatewaySock):
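    # Keep retrying until both the Bluetooth RFCOMM socket and the TCP connection to the gateway are up, then return the (bt, gatewaySock) pair.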
# Connect BT first
while bt == None:
print "Connection RFCOMM"
try:
bt = bluetooth.BluetoothSocket(bluetooth.RFCOMM)
bt.connect((BT_DEV_ADDR, BT_PORT));
print ("BT connection succeded")
except socket.error as msg:
bt = None
print("Socket connection failed. Error Code : " + str(msg[0]) + " Message " + msg[1])
time.sleep(CONNECT_RETRY_INTERVAL)
while gatewaySock == None:
print "Connecting TCP"
try:
gatewaySock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
gatewaySock.connect((HOST, PORT));
print ("Connection to gateway succeded")
except socket.error as msg:
gatewaySock = None
print("Socket connection failed. Error Code : " + str(msg[0]) + " Message " + msg[1])
time.sleep(CONNECT_RETRY_INTERVAL)
return bt, gatewaySock
def recvDataFromBT(bt, packetLen):
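    # Read packetLen bytes one at a time and assemble them big-endian into a single integer, returned as a string.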
btData = 0
i = 0
while(i < packetLen):
byteAsStr = bt.recv(1)
if (byteAsStr == ''):
break
byte = ord(byteAsStr)
btData = (btData << 8) + byte
i = i + 1
return str(btData)
bt = None
s = None
while True:
bt, s = connectSockets(bt, s)
btData = None
# btData != "" means remote host is down
while btData == None:
wasExceptionOccured = 0
try:
btData = recvDataFromBT(bt, BT_PACKET_LEN);
except socket.error as sockErr:
print(sockErr)
try:
s.close()
except Exception as msg:
print(msg[0])
wasExceptionOccured = 1
if (wasExceptionOccured == 1 or btData == ''):
# something went wrong, reconnect bluetooth socket
btData = None
bt = None
			bt, s = connectSockets(bt, s)
timeStr = datetime.datetime.utcnow().isoformat()
JSONdata = "{\"value\":"+btData+",\"guid\":\""+GUID+"\",\"organization\":\""+Org+"\",\"displayname\":\""+Disp +"\",\"unitofmeasure\":\""+Units+"\",\"measurename\":\""+Measure+"\",\"location\":\""+Locn+"\",\"timecreated\":\""+timeStr+"\"}"
print(JSONdata)
wasExceptionOccured = 0
try:
# send to gateway over socket interface
bytesNeedToBeSent = len(JSONdata)
bytesSent = 0
while(bytesSent < bytesNeedToBeSent):
bytesSent = bytesSent + s.send("<" + JSONdata + ">")
		# TODO check if all bytes were sent. Send again if necessary.
except Exception as msg:
print(msg[0])
try:
s.close()
except Exception as msg:
print(msg[0])
wasExceptionOccured = 1
if (wasExceptionOccured == 1):
# something went wrong, reconnect gateway socket
s = None
print "gateway socket exception occured"
bt,s = connectSockets(bt,s)
time.sleep(1)
# will never get here, unless server dies
try:
s.close()
except Exception as msg:
# eat all exception and go back to connect loop
print(msg[0])
try:
bt.close()
except Exception as msg:
# eat all exception and go back to connect loop
print(msg[0])
| mit | 6,806,571,821,409,369,000 | 38.496552 | 246 | 0.607997 | false |
cloudcache/zstack-utility | zstacklib/zstacklib/utils/salt.py | 3 | 3851 | '''
@author: YYK
'''
import shell
import ssh
import os.path
import log
import subprocess
import lock
import time
import json
logger = log.get_logger(__name__)
class SaltError(Exception):
'''salt error'''
def prepare_salt_state(state_path, salt_state_path='/srv/salt'):
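    """Copy the given salt state directory into the master's state root (default /srv/salt), replacing any previous copy; raises if the salt CLI is missing."""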
try:
subprocess.call(['salt', '--version'])
except Exception as e:
print "Execute `salt --version` failed. Probably there isn't salt installed"
raise e
if not os.path.exists(salt_state_path):
os.makedirs(salt_state_path, 0755)
shell.call('rm -rf %s' % os.path.join(salt_state_path, os.path.basename(state_path)))
shell.call('cp -r %s %s' % (state_path, salt_state_path))
def is_salt_failed(salt_json_output):
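    """Return True if the JSON output of a salt run contains any failed state result."""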
json_data = json.loads(salt_json_output)
if isinstance(json_data, list):
return True
if isinstance(json_data, dict):
for value in json_data.values():
if isinstance(value, dict):
for item in value.values():
if item.has_key('result'):
if item['result'] == False:
return True
elif value == False:
return True
elif isinstance(value, list):
return True
return False
def execute_salt_state(hostname, username, password, state_name, master_name, machine_id=None):
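    """Over SSH, point the remote salt-minion at master_name (installing it if necessary), wait for it to register, then apply state_name; raises SaltError on failure."""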
with lock.FileLock(hostname):
ssh.execute('''ip=`env | grep SSH_CLIENT | cut -d '=' -f 2 | cut -d ' ' -f 1`; [ $ip == ::1 ] && ip=127.0.0.1; sed -i "/%s/d" /etc/hosts; sed -i "/$ip/d" /etc/hosts; echo "$ip %s" >> /etc/hosts''' % (master_name, master_name), hostname, username, password)
if not machine_id:
(retcode, machine_id, err) = ssh.execute('cat /sys/class/dmi/id/product_uuid', hostname, username, password, exception_if_error=False)
if not machine_id:
raise SaltError("Can't find machine-id on %s" % hostname)
machine_id = machine_id.strip()
if not wait_for_salt_minion_daemon(machine_id, 1, False):
            ssh.execute('which salt-minion; [ $? -ne 0 ] && curl -L http://bootstrap.saltstack.org | sudo sh ;sed -i "/^id/d" /etc/salt/minion; sed -i "/^master/d" /etc/salt/minion; echo "id: %s" >>/etc/salt/minion; echo "master: %s" >> /etc/salt/minion; rm -f /etc/salt/pki/minion/minion_master.pub ; service salt-minion restart' % (machine_id, master_name), hostname, username, password, exception_if_error=False)
wait_for_salt_minion_daemon(machine_id)
print 'salt %s %s' % (machine_id, state_name)
output = shell.call('salt --out=json %s %s' % (machine_id, state_name))
if not is_salt_failed(output):
print '%s' % output
print "salt has deployed %s" % state_name
else:
raise SaltError('salt execution failure: %s' % output)
#need to wait a while for salt_minion to register with the master after its service is restarted.
def wait_for_salt_minion_daemon(salt_minion_id, timeout_times=10, exception=True):
def _salt_ping():
cmd = shell.ShellCmd('salt -t 1 --out=json %s test.ping' % salt_minion_id)
cmd(False)
return cmd.return_code == 0 and cmd.stdout != ''
import time
while timeout_times > 0:
if _salt_ping():
return True
time.sleep(1)
timeout_times -= 1
print 'Wait for salt minion: %s registration to master' % salt_minion_id
else:
print 'Command fail: `salt %s test.ping`' % salt_minion_id
if exception:
raise SaltError('Salt minion daemon: %s failed to register to master, after trying %s times.' % (salt_minion_id, timeout_times))
else:
return False
| apache-2.0 | 2,149,440,955,060,939,000 | 38.968085 | 413 | 0.586601 | false |
aweinstock314/aweinstock-programming-competition-solutions | upe_competition_2017_04_09/solutions/solve_dining.py | 1 | 1200 | #!/usr/bin/env python
foods = []
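# Each input line looks like "name,price:flavor1,flavor2,..."; read until EOF.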
try:
while True:
tmp = raw_input().rstrip().split(':')
categories = tmp[1].split(',')
name, price = tmp[0].split(',')
price = float(price)
foods.append([name, price, set(categories)])
except EOFError:
pass
#print foods
all_flavors = set(['salty', 'sweet', 'bitter', 'sour', 'spicy', 'umami'])
def greedy_main():
by_price = list(sorted(foods, key=lambda x:x[1]))
#print by_price
cost = 0.0
    chosen = []
    flavors_so_far = set()
    for food in by_price:
        if all_flavors.issubset(flavors_so_far):
            #print chosen
            print('%.2f' % (cost,))
            break
        cost += food[1]
        chosen.append(food[0])
flavors_so_far.update(food[2])
def dynprog(cost, flavors_so_far, foods):
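    # Brute-force recursion: try every remaining food in turn and return the cheapest total cost that ends up covering all six flavors.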
if all_flavors.issubset(flavors_so_far):
return cost
subcosts = set()
for food in foods:
subfoods = set(foods)
subfoods.remove(food)
subcosts.add(dynprog(cost+food[1], flavors_so_far.union(set(food[2])), subfoods))
return min(subcosts)
print('%.2f' % (dynprog(0.0, set(), set([((), food[1], tuple(food[2])) for food in foods])),))
| agpl-3.0 | 2,157,975,292,861,958,700 | 25.666667 | 94 | 0.564167 | false |
lcnodc/codes | 09-revisao/practice_python/tic_tac_toe_draw.py | 1 | 3900 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Exercise 27: Tic Tac Toe Draw
This exercise is Part 3 of 4 of the Tic Tac Toe exercise series. The
other exercises are: Part 1, Part 2, and Part 4.
In a previous exercise we explored the idea of using a list of lists as
a “data structure” to store information about a tic tac toe game. In a
tic tac toe game, the “game server” needs to know where the Xs and Os
are in the board, to know whether player 1 or player 2 (or whoever is X
and O won).
There has also been an exercise about drawing the actual tic tac toe
gameboard using text characters.
The next logical step is to deal with handling user input. When a player
(say player 1, who is X) wants to place an X on the screen, they can’t
just click on a terminal. So we are going to approximate this clicking
simply by asking the user for a coordinate of where they want to place
their piece.
As a reminder, our tic tac toe game is really a list of lists. The game
starts out with an empty game board like this:
game = [[0, 0, 0],
[0, 0, 0],
[0, 0, 0]]
The computer asks Player 1 (X) what their move is (in the format
row,col), and say they type 1,3. Then the game would print out
game = [[0, 0, X],
[0, 0, 0],
[0, 0, 0]]
And ask Player 2 for their move, printing an O in that place.
Things to note:
For this exercise, assume that player 1 (the first player to move) will
always be X and player 2 (the second player) will always be O.
Notice how in the example I gave coordinates for where I want to move
starting from (1, 1) instead of (0, 0). To people who don’t program,
starting to count at 0 is a strange concept, so it is better for the
user experience if the row counts and column counts start at 1. This is
not required, but whichever way you choose to implement this, it should
be explained to the player.
Ask the user to enter coordinates in the form “row,col” - a number, then
a comma, then a number. Then you can use your Python skills to figure
out which row and column they want their piece to be in.
Don’t worry about checking whether someone won the game, but if a player
tries to put a piece in a game position where there already is another
piece, do not allow the piece to go there.
Bonus:
For the “standard” exercise, don’t worry about “ending” the game - no
need to keep track of how many squares are full. In a bonus version,
keep track of how many squares are full and automatically stop asking
for moves when there are no more valid moves.
"""
import random
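# A minimal sketch (not used by the game below) of how a 1-based "row,col"
# move such as "1,3" could be applied to the list-of-lists board described in
# the docstring; `game` is assumed to be a 3x3 list of lists with 0 marking
# an empty square.
def place_on_grid(game, move, piece):
    """Place `piece` on `game` at the 1-based "row,col" position in `move`."""
    row, col = (int(part) - 1 for part in move.split(","))
    if game[row][col] == 0:
        game[row][col] = piece
        return True
    return False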
def draw_top(columns, row):
print(" ---" * columns if row == 0 else "", end="")
def draw_middle(columns, row, pieces):
print()
for column in range(columns):
piece = pieces.get(str(row + 1) + "," + str(column + 1), "-")
print("|" + " " + piece + " ", end="")
print("|")
def draw_bottom(columns):
print((" ---" * columns), end="")
def draw_board(rows, columns, pieces):
for row in range(rows):
draw_top(columns, row)
draw_middle(columns, row, pieces)
draw_bottom(columns)
print()
def get_position(piece):
if piece == 'X':
return input("USER: Enter with a position in board: ")
if piece == 'O':
position = str(random.randint(1, 3)) + "," + str(random.randint(1, 3))
print("CPU: Enter with a position in board:", position)
return position
if __name__ == "__main__":
rows = 3
columns = 3
pieces = dict()
busy_positions = 0
piece = "X"
while busy_positions < (rows * columns):
position = get_position(piece)
if position in pieces.keys():
print("Position busy, choose other...")
else:
busy_positions += 1
pieces[position] = piece
piece = "O" if piece == "X" else "X"
draw_board(rows, columns, pieces)
| mit | 4,072,697,720,786,208,300 | 32.094017 | 78 | 0.662965 | false |
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/scipy/stats/tests/test_morestats.py | 7 | 55766 | # Author: Travis Oliphant, 2002
#
# Further enhancements and tests added by numerous SciPy developers.
#
from __future__ import division, print_function, absolute_import
import warnings
import numpy as np
from numpy.random import RandomState
from numpy.testing import (assert_array_equal,
assert_almost_equal, assert_array_less, assert_array_almost_equal,
assert_, assert_allclose, assert_equal, assert_warns)
import pytest
from pytest import raises as assert_raises
from scipy._lib._numpy_compat import suppress_warnings
from scipy import stats
from .common_tests import check_named_results
# Matplotlib is not a scipy dependency but is optionally used in probplot, so
# check if it's available
try:
import matplotlib.pyplot as plt
have_matplotlib = True
except Exception:
have_matplotlib = False
g1 = [1.006, 0.996, 0.998, 1.000, 0.992, 0.993, 1.002, 0.999, 0.994, 1.000]
g2 = [0.998, 1.006, 1.000, 1.002, 0.997, 0.998, 0.996, 1.000, 1.006, 0.988]
g3 = [0.991, 0.987, 0.997, 0.999, 0.995, 0.994, 1.000, 0.999, 0.996, 0.996]
g4 = [1.005, 1.002, 0.994, 1.000, 0.995, 0.994, 0.998, 0.996, 1.002, 0.996]
g5 = [0.998, 0.998, 0.982, 0.990, 1.002, 0.984, 0.996, 0.993, 0.980, 0.996]
g6 = [1.009, 1.013, 1.009, 0.997, 0.988, 1.002, 0.995, 0.998, 0.981, 0.996]
g7 = [0.990, 1.004, 0.996, 1.001, 0.998, 1.000, 1.018, 1.010, 0.996, 1.002]
g8 = [0.998, 1.000, 1.006, 1.000, 1.002, 0.996, 0.998, 0.996, 1.002, 1.006]
g9 = [1.002, 0.998, 0.996, 0.995, 0.996, 1.004, 1.004, 0.998, 0.999, 0.991]
g10 = [0.991, 0.995, 0.984, 0.994, 0.997, 0.997, 0.991, 0.998, 1.004, 0.997]
class TestBayes_mvs(object):
def test_basic(self):
# Expected values in this test simply taken from the function. For
# some checks regarding correctness of implementation, see review in
# gh-674
data = [6, 9, 12, 7, 8, 8, 13]
mean, var, std = stats.bayes_mvs(data)
assert_almost_equal(mean.statistic, 9.0)
assert_allclose(mean.minmax, (7.1036502226125329, 10.896349777387467),
rtol=1e-14)
assert_almost_equal(var.statistic, 10.0)
assert_allclose(var.minmax, (3.1767242068607087, 24.45910381334018),
rtol=1e-09)
assert_almost_equal(std.statistic, 2.9724954732045084, decimal=14)
assert_allclose(std.minmax, (1.7823367265645145, 4.9456146050146312),
rtol=1e-14)
def test_empty_input(self):
assert_raises(ValueError, stats.bayes_mvs, [])
def test_result_attributes(self):
x = np.arange(15)
attributes = ('statistic', 'minmax')
res = stats.bayes_mvs(x)
for i in res:
check_named_results(i, attributes)
class TestMvsdist(object):
def test_basic(self):
data = [6, 9, 12, 7, 8, 8, 13]
mean, var, std = stats.mvsdist(data)
assert_almost_equal(mean.mean(), 9.0)
assert_allclose(mean.interval(0.9), (7.1036502226125329,
10.896349777387467), rtol=1e-14)
assert_almost_equal(var.mean(), 10.0)
assert_allclose(var.interval(0.9), (3.1767242068607087,
24.45910381334018), rtol=1e-09)
assert_almost_equal(std.mean(), 2.9724954732045084, decimal=14)
assert_allclose(std.interval(0.9), (1.7823367265645145,
4.9456146050146312), rtol=1e-14)
def test_empty_input(self):
assert_raises(ValueError, stats.mvsdist, [])
def test_bad_arg(self):
# Raise ValueError if fewer than two data points are given.
data = [1]
assert_raises(ValueError, stats.mvsdist, data)
def test_warns(self):
# regression test for gh-5270
# make sure there are no spurious divide-by-zero warnings
with warnings.catch_warnings():
warnings.simplefilter('error', RuntimeWarning)
[x.mean() for x in stats.mvsdist([1, 2, 3])]
[x.mean() for x in stats.mvsdist([1, 2, 3, 4, 5])]
class TestShapiro(object):
def test_basic(self):
x1 = [0.11, 7.87, 4.61, 10.14, 7.95, 3.14, 0.46,
4.43, 0.21, 4.75, 0.71, 1.52, 3.24,
0.93, 0.42, 4.97, 9.53, 4.55, 0.47, 6.66]
w, pw = stats.shapiro(x1)
assert_almost_equal(w, 0.90047299861907959, 6)
assert_almost_equal(pw, 0.042089745402336121, 6)
x2 = [1.36, 1.14, 2.92, 2.55, 1.46, 1.06, 5.27, -1.11,
3.48, 1.10, 0.88, -0.51, 1.46, 0.52, 6.20, 1.69,
0.08, 3.67, 2.81, 3.49]
w, pw = stats.shapiro(x2)
assert_almost_equal(w, 0.9590270, 6)
assert_almost_equal(pw, 0.52460, 3)
# Verified against R
np.random.seed(12345678)
x3 = stats.norm.rvs(loc=5, scale=3, size=100)
w, pw = stats.shapiro(x3)
assert_almost_equal(w, 0.9772805571556091, decimal=6)
assert_almost_equal(pw, 0.08144091814756393, decimal=3)
# Extracted from original paper
x4 = [0.139, 0.157, 0.175, 0.256, 0.344, 0.413, 0.503, 0.577, 0.614,
0.655, 0.954, 1.392, 1.557, 1.648, 1.690, 1.994, 2.174, 2.206,
3.245, 3.510, 3.571, 4.354, 4.980, 6.084, 8.351]
W_expected = 0.83467
p_expected = 0.000914
w, pw = stats.shapiro(x4)
assert_almost_equal(w, W_expected, decimal=4)
assert_almost_equal(pw, p_expected, decimal=5)
def test_2d(self):
x1 = [[0.11, 7.87, 4.61, 10.14, 7.95, 3.14, 0.46,
4.43, 0.21, 4.75], [0.71, 1.52, 3.24,
0.93, 0.42, 4.97, 9.53, 4.55, 0.47, 6.66]]
w, pw = stats.shapiro(x1)
assert_almost_equal(w, 0.90047299861907959, 6)
assert_almost_equal(pw, 0.042089745402336121, 6)
x2 = [[1.36, 1.14, 2.92, 2.55, 1.46, 1.06, 5.27, -1.11,
3.48, 1.10], [0.88, -0.51, 1.46, 0.52, 6.20, 1.69,
0.08, 3.67, 2.81, 3.49]]
w, pw = stats.shapiro(x2)
assert_almost_equal(w, 0.9590270, 6)
assert_almost_equal(pw, 0.52460, 3)
def test_empty_input(self):
assert_raises(ValueError, stats.shapiro, [])
assert_raises(ValueError, stats.shapiro, [[], [], []])
def test_not_enough_values(self):
assert_raises(ValueError, stats.shapiro, [1, 2])
assert_raises(ValueError, stats.shapiro, [[], [2]])
def test_bad_arg(self):
# Length of x is less than 3.
x = [1]
assert_raises(ValueError, stats.shapiro, x)
def test_nan_input(self):
x = np.arange(10.)
x[9] = np.nan
w, pw = stats.shapiro(x)
assert_equal(w, np.nan)
assert_almost_equal(pw, 1.0)
class TestAnderson(object):
def test_normal(self):
rs = RandomState(1234567890)
x1 = rs.standard_exponential(size=50)
x2 = rs.standard_normal(size=50)
A, crit, sig = stats.anderson(x1)
assert_array_less(crit[:-1], A)
A, crit, sig = stats.anderson(x2)
assert_array_less(A, crit[-2:])
v = np.ones(10)
v[0] = 0
A, crit, sig = stats.anderson(v)
# The expected statistic 3.208057 was computed independently of scipy.
# For example, in R:
# > library(nortest)
# > v <- rep(1, 10)
# > v[1] <- 0
# > result <- ad.test(v)
# > result$statistic
# A
# 3.208057
assert_allclose(A, 3.208057)
def test_expon(self):
rs = RandomState(1234567890)
x1 = rs.standard_exponential(size=50)
x2 = rs.standard_normal(size=50)
A, crit, sig = stats.anderson(x1, 'expon')
assert_array_less(A, crit[-2:])
olderr = np.seterr(all='ignore')
try:
A, crit, sig = stats.anderson(x2, 'expon')
finally:
np.seterr(**olderr)
assert_(A > crit[-1])
def test_gumbel(self):
# Regression test for gh-6306. Before that issue was fixed,
# this case would return a2=inf.
v = np.ones(100)
v[0] = 0.0
a2, crit, sig = stats.anderson(v, 'gumbel')
# A brief reimplementation of the calculation of the statistic.
n = len(v)
xbar, s = stats.gumbel_l.fit(v)
logcdf = stats.gumbel_l.logcdf(v, xbar, s)
logsf = stats.gumbel_l.logsf(v, xbar, s)
i = np.arange(1, n+1)
expected_a2 = -n - np.mean((2*i - 1) * (logcdf + logsf[::-1]))
assert_allclose(a2, expected_a2)
def test_bad_arg(self):
assert_raises(ValueError, stats.anderson, [1], dist='plate_of_shrimp')
def test_result_attributes(self):
rs = RandomState(1234567890)
x = rs.standard_exponential(size=50)
res = stats.anderson(x)
attributes = ('statistic', 'critical_values', 'significance_level')
check_named_results(res, attributes)
def test_gumbel_l(self):
# gh-2592, gh-6337
# Adds support to 'gumbel_r' and 'gumbel_l' as valid inputs for dist.
rs = RandomState(1234567890)
x = rs.gumbel(size=100)
A1, crit1, sig1 = stats.anderson(x, 'gumbel')
A2, crit2, sig2 = stats.anderson(x, 'gumbel_l')
assert_allclose(A2, A1)
def test_gumbel_r(self):
# gh-2592, gh-6337
# Adds support to 'gumbel_r' and 'gumbel_l' as valid inputs for dist.
rs = RandomState(1234567890)
x1 = rs.gumbel(size=100)
x2 = np.ones(100)
A1, crit1, sig1 = stats.anderson(x1, 'gumbel_r')
A2, crit2, sig2 = stats.anderson(x2, 'gumbel_r')
assert_array_less(A1, crit1[-2:])
assert_(A2 > crit2[-1])
class TestAndersonKSamp(object):
def test_example1a(self):
# Example data from Scholz & Stephens (1987), originally
# published in Lehmann (1995, Nonparametrics, Statistical
# Methods Based on Ranks, p. 309)
# Pass a mixture of lists and arrays
t1 = [38.7, 41.5, 43.8, 44.5, 45.5, 46.0, 47.7, 58.0]
t2 = np.array([39.2, 39.3, 39.7, 41.4, 41.8, 42.9, 43.3, 45.8])
t3 = np.array([34.0, 35.0, 39.0, 40.0, 43.0, 43.0, 44.0, 45.0])
t4 = np.array([34.0, 34.8, 34.8, 35.4, 37.2, 37.8, 41.2, 42.8])
assert_warns(UserWarning, stats.anderson_ksamp, (t1, t2, t3, t4),
midrank=False)
with suppress_warnings() as sup:
sup.filter(UserWarning, message='approximate p-value')
Tk, tm, p = stats.anderson_ksamp((t1, t2, t3, t4), midrank=False)
assert_almost_equal(Tk, 4.449, 3)
assert_array_almost_equal([0.4985, 1.3237, 1.9158, 2.4930, 3.2459],
tm, 4)
assert_almost_equal(p, 0.0021, 4)
def test_example1b(self):
# Example data from Scholz & Stephens (1987), originally
# published in Lehmann (1995, Nonparametrics, Statistical
# Methods Based on Ranks, p. 309)
# Pass arrays
t1 = np.array([38.7, 41.5, 43.8, 44.5, 45.5, 46.0, 47.7, 58.0])
t2 = np.array([39.2, 39.3, 39.7, 41.4, 41.8, 42.9, 43.3, 45.8])
t3 = np.array([34.0, 35.0, 39.0, 40.0, 43.0, 43.0, 44.0, 45.0])
t4 = np.array([34.0, 34.8, 34.8, 35.4, 37.2, 37.8, 41.2, 42.8])
with suppress_warnings() as sup:
sup.filter(UserWarning, message='approximate p-value')
Tk, tm, p = stats.anderson_ksamp((t1, t2, t3, t4), midrank=True)
assert_almost_equal(Tk, 4.480, 3)
assert_array_almost_equal([0.4985, 1.3237, 1.9158, 2.4930, 3.2459],
tm, 4)
assert_almost_equal(p, 0.0020, 4)
def test_example2a(self):
# Example data taken from an earlier technical report of
# Scholz and Stephens
# Pass lists instead of arrays
t1 = [194, 15, 41, 29, 33, 181]
t2 = [413, 14, 58, 37, 100, 65, 9, 169, 447, 184, 36, 201, 118]
t3 = [34, 31, 18, 18, 67, 57, 62, 7, 22, 34]
t4 = [90, 10, 60, 186, 61, 49, 14, 24, 56, 20, 79, 84, 44, 59, 29,
118, 25, 156, 310, 76, 26, 44, 23, 62]
t5 = [130, 208, 70, 101, 208]
t6 = [74, 57, 48, 29, 502, 12, 70, 21, 29, 386, 59, 27]
t7 = [55, 320, 56, 104, 220, 239, 47, 246, 176, 182, 33]
t8 = [23, 261, 87, 7, 120, 14, 62, 47, 225, 71, 246, 21, 42, 20, 5,
12, 120, 11, 3, 14, 71, 11, 14, 11, 16, 90, 1, 16, 52, 95]
t9 = [97, 51, 11, 4, 141, 18, 142, 68, 77, 80, 1, 16, 106, 206, 82,
54, 31, 216, 46, 111, 39, 63, 18, 191, 18, 163, 24]
t10 = [50, 44, 102, 72, 22, 39, 3, 15, 197, 188, 79, 88, 46, 5, 5, 36,
22, 139, 210, 97, 30, 23, 13, 14]
t11 = [359, 9, 12, 270, 603, 3, 104, 2, 438]
t12 = [50, 254, 5, 283, 35, 12]
t13 = [487, 18, 100, 7, 98, 5, 85, 91, 43, 230, 3, 130]
t14 = [102, 209, 14, 57, 54, 32, 67, 59, 134, 152, 27, 14, 230, 66,
61, 34]
with suppress_warnings() as sup:
sup.filter(UserWarning, message='approximate p-value')
Tk, tm, p = stats.anderson_ksamp((t1, t2, t3, t4, t5, t6, t7, t8,
t9, t10, t11, t12, t13, t14),
midrank=False)
assert_almost_equal(Tk, 3.288, 3)
assert_array_almost_equal([0.5990, 1.3269, 1.8052, 2.2486, 2.8009],
tm, 4)
assert_almost_equal(p, 0.0041, 4)
def test_example2b(self):
# Example data taken from an earlier technical report of
# Scholz and Stephens
t1 = [194, 15, 41, 29, 33, 181]
t2 = [413, 14, 58, 37, 100, 65, 9, 169, 447, 184, 36, 201, 118]
t3 = [34, 31, 18, 18, 67, 57, 62, 7, 22, 34]
t4 = [90, 10, 60, 186, 61, 49, 14, 24, 56, 20, 79, 84, 44, 59, 29,
118, 25, 156, 310, 76, 26, 44, 23, 62]
t5 = [130, 208, 70, 101, 208]
t6 = [74, 57, 48, 29, 502, 12, 70, 21, 29, 386, 59, 27]
t7 = [55, 320, 56, 104, 220, 239, 47, 246, 176, 182, 33]
t8 = [23, 261, 87, 7, 120, 14, 62, 47, 225, 71, 246, 21, 42, 20, 5,
12, 120, 11, 3, 14, 71, 11, 14, 11, 16, 90, 1, 16, 52, 95]
t9 = [97, 51, 11, 4, 141, 18, 142, 68, 77, 80, 1, 16, 106, 206, 82,
54, 31, 216, 46, 111, 39, 63, 18, 191, 18, 163, 24]
t10 = [50, 44, 102, 72, 22, 39, 3, 15, 197, 188, 79, 88, 46, 5, 5, 36,
22, 139, 210, 97, 30, 23, 13, 14]
t11 = [359, 9, 12, 270, 603, 3, 104, 2, 438]
t12 = [50, 254, 5, 283, 35, 12]
t13 = [487, 18, 100, 7, 98, 5, 85, 91, 43, 230, 3, 130]
t14 = [102, 209, 14, 57, 54, 32, 67, 59, 134, 152, 27, 14, 230, 66,
61, 34]
with suppress_warnings() as sup:
sup.filter(UserWarning, message='approximate p-value')
Tk, tm, p = stats.anderson_ksamp((t1, t2, t3, t4, t5, t6, t7, t8,
t9, t10, t11, t12, t13, t14),
midrank=True)
assert_almost_equal(Tk, 3.294, 3)
assert_array_almost_equal([0.5990, 1.3269, 1.8052, 2.2486, 2.8009],
tm, 4)
assert_almost_equal(p, 0.0041, 4)
def test_not_enough_samples(self):
assert_raises(ValueError, stats.anderson_ksamp, np.ones(5))
def test_no_distinct_observations(self):
assert_raises(ValueError, stats.anderson_ksamp,
(np.ones(5), np.ones(5)))
def test_empty_sample(self):
assert_raises(ValueError, stats.anderson_ksamp, (np.ones(5), []))
def test_result_attributes(self):
# Example data from Scholz & Stephens (1987), originally
# published in Lehmann (1995, Nonparametrics, Statistical
# Methods Based on Ranks, p. 309)
# Pass a mixture of lists and arrays
t1 = [38.7, 41.5, 43.8, 44.5, 45.5, 46.0, 47.7, 58.0]
t2 = np.array([39.2, 39.3, 39.7, 41.4, 41.8, 42.9, 43.3, 45.8])
t3 = np.array([34.0, 35.0, 39.0, 40.0, 43.0, 43.0, 44.0, 45.0])
t4 = np.array([34.0, 34.8, 34.8, 35.4, 37.2, 37.8, 41.2, 42.8])
with suppress_warnings() as sup:
sup.filter(UserWarning, message='approximate p-value')
res = stats.anderson_ksamp((t1, t2, t3, t4), midrank=False)
attributes = ('statistic', 'critical_values', 'significance_level')
check_named_results(res, attributes)
def test_overflow(self):
# when significance_level approximation overflows, should still return
with suppress_warnings() as sup:
sup.filter(UserWarning, message='approximate p-value')
res = stats.anderson_ksamp([[-20, -10] * 100, [-10, 40, 12] * 100])
assert_almost_equal(res[0], 272.796, 3)
class TestAnsari(object):
def test_small(self):
x = [1, 2, 3, 3, 4]
y = [3, 2, 6, 1, 6, 1, 4, 1]
with suppress_warnings() as sup:
sup.filter(UserWarning, "Ties preclude use of exact statistic.")
W, pval = stats.ansari(x, y)
assert_almost_equal(W, 23.5, 11)
assert_almost_equal(pval, 0.13499256881897437, 11)
def test_approx(self):
ramsay = np.array((111, 107, 100, 99, 102, 106, 109, 108, 104, 99,
101, 96, 97, 102, 107, 113, 116, 113, 110, 98))
parekh = np.array((107, 108, 106, 98, 105, 103, 110, 105, 104,
100, 96, 108, 103, 104, 114, 114, 113, 108,
106, 99))
with suppress_warnings() as sup:
sup.filter(UserWarning, "Ties preclude use of exact statistic.")
W, pval = stats.ansari(ramsay, parekh)
assert_almost_equal(W, 185.5, 11)
assert_almost_equal(pval, 0.18145819972867083, 11)
def test_exact(self):
W, pval = stats.ansari([1, 2, 3, 4], [15, 5, 20, 8, 10, 12])
assert_almost_equal(W, 10.0, 11)
assert_almost_equal(pval, 0.533333333333333333, 7)
def test_bad_arg(self):
assert_raises(ValueError, stats.ansari, [], [1])
assert_raises(ValueError, stats.ansari, [1], [])
def test_result_attributes(self):
x = [1, 2, 3, 3, 4]
y = [3, 2, 6, 1, 6, 1, 4, 1]
with suppress_warnings() as sup:
sup.filter(UserWarning, "Ties preclude use of exact statistic.")
res = stats.ansari(x, y)
attributes = ('statistic', 'pvalue')
check_named_results(res, attributes)
class TestBartlett(object):
def test_data(self):
args = [g1, g2, g3, g4, g5, g6, g7, g8, g9, g10]
T, pval = stats.bartlett(*args)
assert_almost_equal(T, 20.78587342806484, 7)
assert_almost_equal(pval, 0.0136358632781, 7)
def test_bad_arg(self):
# Too few args raises ValueError.
assert_raises(ValueError, stats.bartlett, [1])
def test_result_attributes(self):
args = [g1, g2, g3, g4, g5, g6, g7, g8, g9, g10]
res = stats.bartlett(*args)
attributes = ('statistic', 'pvalue')
check_named_results(res, attributes)
def test_empty_arg(self):
args = (g1, g2, g3, g4, g5, g6, g7, g8, g9, g10, [])
assert_equal((np.nan, np.nan), stats.bartlett(*args))
class TestLevene(object):
def test_data(self):
args = [g1, g2, g3, g4, g5, g6, g7, g8, g9, g10]
W, pval = stats.levene(*args)
assert_almost_equal(W, 1.7059176930008939, 7)
assert_almost_equal(pval, 0.0990829755522, 7)
def test_trimmed1(self):
# Test that center='trimmed' gives the same result as center='mean'
# when proportiontocut=0.
W1, pval1 = stats.levene(g1, g2, g3, center='mean')
W2, pval2 = stats.levene(g1, g2, g3, center='trimmed',
proportiontocut=0.0)
assert_almost_equal(W1, W2)
assert_almost_equal(pval1, pval2)
def test_trimmed2(self):
x = [1.2, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 100.0]
y = [0.0, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5, 200.0]
np.random.seed(1234)
x2 = np.random.permutation(x)
# Use center='trimmed'
W0, pval0 = stats.levene(x, y, center='trimmed',
proportiontocut=0.125)
W1, pval1 = stats.levene(x2, y, center='trimmed',
proportiontocut=0.125)
# Trim the data here, and use center='mean'
W2, pval2 = stats.levene(x[1:-1], y[1:-1], center='mean')
# Result should be the same.
assert_almost_equal(W0, W2)
assert_almost_equal(W1, W2)
assert_almost_equal(pval1, pval2)
def test_equal_mean_median(self):
x = np.linspace(-1, 1, 21)
np.random.seed(1234)
x2 = np.random.permutation(x)
y = x**3
W1, pval1 = stats.levene(x, y, center='mean')
W2, pval2 = stats.levene(x2, y, center='median')
assert_almost_equal(W1, W2)
assert_almost_equal(pval1, pval2)
def test_bad_keyword(self):
x = np.linspace(-1, 1, 21)
assert_raises(TypeError, stats.levene, x, x, portiontocut=0.1)
def test_bad_center_value(self):
x = np.linspace(-1, 1, 21)
assert_raises(ValueError, stats.levene, x, x, center='trim')
def test_too_few_args(self):
assert_raises(ValueError, stats.levene, [1])
def test_result_attributes(self):
args = [g1, g2, g3, g4, g5, g6, g7, g8, g9, g10]
res = stats.levene(*args)
attributes = ('statistic', 'pvalue')
check_named_results(res, attributes)
class TestBinomP(object):
def test_data(self):
pval = stats.binom_test(100, 250)
assert_almost_equal(pval, 0.0018833009350757682, 11)
pval = stats.binom_test(201, 405)
assert_almost_equal(pval, 0.92085205962670713, 11)
pval = stats.binom_test([682, 243], p=3.0/4)
assert_almost_equal(pval, 0.38249155957481695, 11)
def test_bad_len_x(self):
# Length of x must be 1 or 2.
assert_raises(ValueError, stats.binom_test, [1, 2, 3])
def test_bad_n(self):
# len(x) is 1, but n is invalid.
# Missing n
assert_raises(ValueError, stats.binom_test, [100])
# n less than x[0]
assert_raises(ValueError, stats.binom_test, [100], n=50)
def test_bad_p(self):
assert_raises(ValueError, stats.binom_test, [50, 50], p=2.0)
def test_alternatives(self):
res = stats.binom_test(51, 235, p=1./6, alternative='less')
assert_almost_equal(res, 0.982022657605858)
res = stats.binom_test(51, 235, p=1./6, alternative='greater')
assert_almost_equal(res, 0.02654424571169085)
res = stats.binom_test(51, 235, p=1./6, alternative='two-sided')
assert_almost_equal(res, 0.0437479701823997)
class TestFligner(object):
def test_data(self):
# numbers from R: fligner.test in package stats
x1 = np.arange(5)
assert_array_almost_equal(stats.fligner(x1, x1**2),
(3.2282229927203536, 0.072379187848207877),
11)
def test_trimmed1(self):
# Perturb input to break ties in the transformed data
# See https://github.com/scipy/scipy/pull/8042 for more details
rs = np.random.RandomState(123)
_perturb = lambda g: (np.asarray(g) + 1e-10*rs.randn(len(g))).tolist()
g1_ = _perturb(g1)
g2_ = _perturb(g2)
g3_ = _perturb(g3)
# Test that center='trimmed' gives the same result as center='mean'
# when proportiontocut=0.
Xsq1, pval1 = stats.fligner(g1_, g2_, g3_, center='mean')
Xsq2, pval2 = stats.fligner(g1_, g2_, g3_, center='trimmed',
proportiontocut=0.0)
assert_almost_equal(Xsq1, Xsq2)
assert_almost_equal(pval1, pval2)
def test_trimmed2(self):
x = [1.2, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 100.0]
y = [0.0, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5, 200.0]
# Use center='trimmed'
Xsq1, pval1 = stats.fligner(x, y, center='trimmed',
proportiontocut=0.125)
# Trim the data here, and use center='mean'
Xsq2, pval2 = stats.fligner(x[1:-1], y[1:-1], center='mean')
# Result should be the same.
assert_almost_equal(Xsq1, Xsq2)
assert_almost_equal(pval1, pval2)
# The following test looks reasonable at first, but fligner() uses the
# function stats.rankdata(), and in one of the cases in this test,
# there are ties, while in the other (because of normal rounding
# errors) there are not. This difference leads to differences in the
# third significant digit of W.
#
#def test_equal_mean_median(self):
# x = np.linspace(-1,1,21)
# y = x**3
# W1, pval1 = stats.fligner(x, y, center='mean')
# W2, pval2 = stats.fligner(x, y, center='median')
# assert_almost_equal(W1, W2)
# assert_almost_equal(pval1, pval2)
def test_bad_keyword(self):
x = np.linspace(-1, 1, 21)
assert_raises(TypeError, stats.fligner, x, x, portiontocut=0.1)
def test_bad_center_value(self):
x = np.linspace(-1, 1, 21)
assert_raises(ValueError, stats.fligner, x, x, center='trim')
def test_bad_num_args(self):
# Too few args raises ValueError.
assert_raises(ValueError, stats.fligner, [1])
def test_empty_arg(self):
x = np.arange(5)
assert_equal((np.nan, np.nan), stats.fligner(x, x**2, []))
class TestMood(object):
def test_mood(self):
# numbers from R: mood.test in package stats
x1 = np.arange(5)
assert_array_almost_equal(stats.mood(x1, x1**2),
(-1.3830857299399906, 0.16663858066771478),
11)
def test_mood_order_of_args(self):
# z should change sign when the order of arguments changes, pvalue
# should not change
np.random.seed(1234)
x1 = np.random.randn(10, 1)
x2 = np.random.randn(15, 1)
z1, p1 = stats.mood(x1, x2)
z2, p2 = stats.mood(x2, x1)
assert_array_almost_equal([z1, p1], [-z2, p2])
def test_mood_with_axis_none(self):
# Test with axis = None, compare with results from R
x1 = [-0.626453810742332, 0.183643324222082, -0.835628612410047,
1.59528080213779, 0.329507771815361, -0.820468384118015,
0.487429052428485, 0.738324705129217, 0.575781351653492,
-0.305388387156356, 1.51178116845085, 0.389843236411431,
-0.621240580541804, -2.2146998871775, 1.12493091814311,
-0.0449336090152309, -0.0161902630989461, 0.943836210685299,
0.821221195098089, 0.593901321217509]
x2 = [-0.896914546624981, 0.184849184646742, 1.58784533120882,
-1.13037567424629, -0.0802517565509893, 0.132420284381094,
0.707954729271733, -0.23969802417184, 1.98447393665293,
-0.138787012119665, 0.417650750792556, 0.981752777463662,
-0.392695355503813, -1.03966897694891, 1.78222896030858,
-2.31106908460517, 0.878604580921265, 0.035806718015226,
1.01282869212708, 0.432265154539617, 2.09081920524915,
-1.19992581964387, 1.58963820029007, 1.95465164222325,
0.00493777682814261, -2.45170638784613, 0.477237302613617,
-0.596558168631403, 0.792203270299649, 0.289636710177348]
x1 = np.array(x1)
x2 = np.array(x2)
x1.shape = (10, 2)
x2.shape = (15, 2)
assert_array_almost_equal(stats.mood(x1, x2, axis=None),
[-1.31716607555, 0.18778296257])
def test_mood_2d(self):
# Test if the results of mood test in 2-D case are consistent with the
# R result for the same inputs. Numbers from R mood.test().
ny = 5
np.random.seed(1234)
x1 = np.random.randn(10, ny)
x2 = np.random.randn(15, ny)
z_vectest, pval_vectest = stats.mood(x1, x2)
for j in range(ny):
assert_array_almost_equal([z_vectest[j], pval_vectest[j]],
stats.mood(x1[:, j], x2[:, j]))
# inverse order of dimensions
x1 = x1.transpose()
x2 = x2.transpose()
z_vectest, pval_vectest = stats.mood(x1, x2, axis=1)
for i in range(ny):
# check axis handling is self consistent
assert_array_almost_equal([z_vectest[i], pval_vectest[i]],
stats.mood(x1[i, :], x2[i, :]))
def test_mood_3d(self):
shape = (10, 5, 6)
np.random.seed(1234)
x1 = np.random.randn(*shape)
x2 = np.random.randn(*shape)
for axis in range(3):
z_vectest, pval_vectest = stats.mood(x1, x2, axis=axis)
# Tests that result for 3-D arrays is equal to that for the
# same calculation on a set of 1-D arrays taken from the
# 3-D array
axes_idx = ([1, 2], [0, 2], [0, 1]) # the two axes != axis
for i in range(shape[axes_idx[axis][0]]):
for j in range(shape[axes_idx[axis][1]]):
if axis == 0:
slice1 = x1[:, i, j]
slice2 = x2[:, i, j]
elif axis == 1:
slice1 = x1[i, :, j]
slice2 = x2[i, :, j]
else:
slice1 = x1[i, j, :]
slice2 = x2[i, j, :]
assert_array_almost_equal([z_vectest[i, j],
pval_vectest[i, j]],
stats.mood(slice1, slice2))
def test_mood_bad_arg(self):
# Raise ValueError when the sum of the lengths of the args is
# less than 3
assert_raises(ValueError, stats.mood, [1], [])
class TestProbplot(object):
def test_basic(self):
np.random.seed(12345)
x = stats.norm.rvs(size=20)
osm, osr = stats.probplot(x, fit=False)
osm_expected = [-1.8241636, -1.38768012, -1.11829229, -0.91222575,
-0.73908135, -0.5857176, -0.44506467, -0.31273668,
-0.18568928, -0.06158146, 0.06158146, 0.18568928,
0.31273668, 0.44506467, 0.5857176, 0.73908135,
0.91222575, 1.11829229, 1.38768012, 1.8241636]
assert_allclose(osr, np.sort(x))
assert_allclose(osm, osm_expected)
res, res_fit = stats.probplot(x, fit=True)
res_fit_expected = [1.05361841, 0.31297795, 0.98741609]
assert_allclose(res_fit, res_fit_expected)
def test_sparams_keyword(self):
np.random.seed(123456)
x = stats.norm.rvs(size=100)
# Check that None, () and 0 (loc=0, for normal distribution) all work
# and give the same results
osm1, osr1 = stats.probplot(x, sparams=None, fit=False)
osm2, osr2 = stats.probplot(x, sparams=0, fit=False)
osm3, osr3 = stats.probplot(x, sparams=(), fit=False)
assert_allclose(osm1, osm2)
assert_allclose(osm1, osm3)
assert_allclose(osr1, osr2)
assert_allclose(osr1, osr3)
# Check giving (loc, scale) params for normal distribution
osm, osr = stats.probplot(x, sparams=(), fit=False)
def test_dist_keyword(self):
np.random.seed(12345)
x = stats.norm.rvs(size=20)
osm1, osr1 = stats.probplot(x, fit=False, dist='t', sparams=(3,))
osm2, osr2 = stats.probplot(x, fit=False, dist=stats.t, sparams=(3,))
assert_allclose(osm1, osm2)
assert_allclose(osr1, osr2)
assert_raises(ValueError, stats.probplot, x, dist='wrong-dist-name')
assert_raises(AttributeError, stats.probplot, x, dist=[])
class custom_dist(object):
"""Some class that looks just enough like a distribution."""
def ppf(self, q):
return stats.norm.ppf(q, loc=2)
osm1, osr1 = stats.probplot(x, sparams=(2,), fit=False)
osm2, osr2 = stats.probplot(x, dist=custom_dist(), fit=False)
assert_allclose(osm1, osm2)
assert_allclose(osr1, osr2)
@pytest.mark.skipif(not have_matplotlib, reason="no matplotlib")
def test_plot_kwarg(self):
np.random.seed(7654321)
fig = plt.figure()
fig.add_subplot(111)
x = stats.t.rvs(3, size=100)
res1, fitres1 = stats.probplot(x, plot=plt)
plt.close()
res2, fitres2 = stats.probplot(x, plot=None)
res3 = stats.probplot(x, fit=False, plot=plt)
plt.close()
res4 = stats.probplot(x, fit=False, plot=None)
# Check that results are consistent between combinations of `fit` and
# `plot` keywords.
assert_(len(res1) == len(res2) == len(res3) == len(res4) == 2)
assert_allclose(res1, res2)
assert_allclose(res1, res3)
assert_allclose(res1, res4)
assert_allclose(fitres1, fitres2)
# Check that a Matplotlib Axes object is accepted
fig = plt.figure()
ax = fig.add_subplot(111)
stats.probplot(x, fit=False, plot=ax)
plt.close()
def test_probplot_bad_args(self):
# Raise ValueError when given an invalid distribution.
assert_raises(ValueError, stats.probplot, [1], dist="plate_of_shrimp")
def test_empty(self):
assert_equal(stats.probplot([], fit=False),
(np.array([]), np.array([])))
assert_equal(stats.probplot([], fit=True),
((np.array([]), np.array([])),
(np.nan, np.nan, 0.0)))
def test_array_of_size_one(self):
with np.errstate(invalid='ignore'):
assert_equal(stats.probplot([1], fit=True),
((np.array([0.]), np.array([1])),
(np.nan, np.nan, 0.0)))
def test_wilcoxon_bad_arg():
# Raise ValueError when two args of different lengths are given or
# zero_method is unknown.
assert_raises(ValueError, stats.wilcoxon, [1], [1, 2])
assert_raises(ValueError, stats.wilcoxon, [1, 2], [1, 2], "dummy")
def test_wilcoxon_arg_type():
# Should be able to accept list as arguments.
# Address issue 6070.
arr = [1, 2, 3, 0, -1, 3, 1, 2, 1, 1, 2]
_ = stats.wilcoxon(arr, zero_method="pratt")
_ = stats.wilcoxon(arr, zero_method="zsplit")
_ = stats.wilcoxon(arr, zero_method="wilcox")
class TestKstat(object):
def test_moments_normal_distribution(self):
np.random.seed(32149)
data = np.random.randn(12345)
moments = []
for n in [1, 2, 3, 4]:
moments.append(stats.kstat(data, n))
expected = [0.011315, 1.017931, 0.05811052, 0.0754134]
assert_allclose(moments, expected, rtol=1e-4)
# test equivalence with `stats.moment`
m1 = stats.moment(data, moment=1)
m2 = stats.moment(data, moment=2)
m3 = stats.moment(data, moment=3)
assert_allclose((m1, m2, m3), expected[:-1], atol=0.02, rtol=1e-2)
def test_empty_input(self):
assert_raises(ValueError, stats.kstat, [])
def test_nan_input(self):
data = np.arange(10.)
data[6] = np.nan
assert_equal(stats.kstat(data), np.nan)
def test_kstat_bad_arg(self):
# Raise ValueError if n > 4 or n < 1.
data = np.arange(10)
for n in [0, 4.001]:
assert_raises(ValueError, stats.kstat, data, n=n)
class TestKstatVar(object):
def test_empty_input(self):
assert_raises(ValueError, stats.kstatvar, [])
def test_nan_input(self):
data = np.arange(10.)
data[6] = np.nan
assert_equal(stats.kstat(data), np.nan)
def test_bad_arg(self):
# Raise ValueError is n is not 1 or 2.
data = [1]
n = 10
assert_raises(ValueError, stats.kstatvar, data, n=n)
class TestPpccPlot(object):
def setup_method(self):
np.random.seed(7654321)
self.x = stats.loggamma.rvs(5, size=500) + 5
def test_basic(self):
N = 5
svals, ppcc = stats.ppcc_plot(self.x, -10, 10, N=N)
ppcc_expected = [0.21139644, 0.21384059, 0.98766719, 0.97980182,
0.93519298]
assert_allclose(svals, np.linspace(-10, 10, num=N))
assert_allclose(ppcc, ppcc_expected)
def test_dist(self):
# Test that we can specify distributions both by name and as objects.
svals1, ppcc1 = stats.ppcc_plot(self.x, -10, 10, dist='tukeylambda')
svals2, ppcc2 = stats.ppcc_plot(self.x, -10, 10,
dist=stats.tukeylambda)
assert_allclose(svals1, svals2, rtol=1e-20)
assert_allclose(ppcc1, ppcc2, rtol=1e-20)
# Test that 'tukeylambda' is the default dist
svals3, ppcc3 = stats.ppcc_plot(self.x, -10, 10)
assert_allclose(svals1, svals3, rtol=1e-20)
assert_allclose(ppcc1, ppcc3, rtol=1e-20)
@pytest.mark.skipif(not have_matplotlib, reason="no matplotlib")
def test_plot_kwarg(self):
# Check with the matplotlib.pyplot module
fig = plt.figure()
ax = fig.add_subplot(111)
stats.ppcc_plot(self.x, -20, 20, plot=plt)
fig.delaxes(ax)
# Check that a Matplotlib Axes object is accepted
ax = fig.add_subplot(111)
stats.ppcc_plot(self.x, -20, 20, plot=ax)
plt.close()
def test_invalid_inputs(self):
# `b` has to be larger than `a`
assert_raises(ValueError, stats.ppcc_plot, self.x, 1, 0)
# Raise ValueError when given an invalid distribution.
assert_raises(ValueError, stats.ppcc_plot, [1, 2, 3], 0, 1,
dist="plate_of_shrimp")
def test_empty(self):
# For consistency with probplot return for one empty array,
# ppcc contains all zeros and svals is the same as for normal array
# input.
svals, ppcc = stats.ppcc_plot([], 0, 1)
assert_allclose(svals, np.linspace(0, 1, num=80))
assert_allclose(ppcc, np.zeros(80, dtype=float))
class TestPpccMax(object):
def test_ppcc_max_bad_arg(self):
# Raise ValueError when given an invalid distribution.
data = [1]
assert_raises(ValueError, stats.ppcc_max, data, dist="plate_of_shrimp")
def test_ppcc_max_basic(self):
np.random.seed(1234567)
x = stats.tukeylambda.rvs(-0.7, loc=2, scale=0.5, size=10000) + 1e4
# On Python 2.6 the result is accurate to 5 decimals. On Python >= 2.7
# it is accurate up to 16 decimals
assert_almost_equal(stats.ppcc_max(x), -0.71215366521264145, decimal=5)
def test_dist(self):
np.random.seed(1234567)
x = stats.tukeylambda.rvs(-0.7, loc=2, scale=0.5, size=10000) + 1e4
# Test that we can specify distributions both by name and as objects.
max1 = stats.ppcc_max(x, dist='tukeylambda')
max2 = stats.ppcc_max(x, dist=stats.tukeylambda)
assert_almost_equal(max1, -0.71215366521264145, decimal=5)
assert_almost_equal(max2, -0.71215366521264145, decimal=5)
# Test that 'tukeylambda' is the default dist
max3 = stats.ppcc_max(x)
assert_almost_equal(max3, -0.71215366521264145, decimal=5)
def test_brack(self):
np.random.seed(1234567)
x = stats.tukeylambda.rvs(-0.7, loc=2, scale=0.5, size=10000) + 1e4
assert_raises(ValueError, stats.ppcc_max, x, brack=(0.0, 1.0, 0.5))
# On Python 2.6 the result is accurate to 5 decimals. On Python >= 2.7
# it is accurate up to 16 decimals
assert_almost_equal(stats.ppcc_max(x, brack=(0, 1)),
-0.71215366521264145, decimal=5)
# On Python 2.6 the result is accurate to 5 decimals. On Python >= 2.7
# it is accurate up to 16 decimals
assert_almost_equal(stats.ppcc_max(x, brack=(-2, 2)),
-0.71215366521264145, decimal=5)
class TestBoxcox_llf(object):
def test_basic(self):
np.random.seed(54321)
x = stats.norm.rvs(size=10000, loc=10)
lmbda = 1
llf = stats.boxcox_llf(lmbda, x)
llf_expected = -x.size / 2. * np.log(np.sum(x.std()**2))
assert_allclose(llf, llf_expected)
def test_array_like(self):
np.random.seed(54321)
x = stats.norm.rvs(size=100, loc=10)
lmbda = 1
llf = stats.boxcox_llf(lmbda, x)
llf2 = stats.boxcox_llf(lmbda, list(x))
assert_allclose(llf, llf2, rtol=1e-12)
def test_2d_input(self):
# Note: boxcox_llf() was already working with 2-D input (sort of), so
# keep it like that. boxcox() doesn't work with 2-D input though, due
# to brent() returning a scalar.
np.random.seed(54321)
x = stats.norm.rvs(size=100, loc=10)
lmbda = 1
llf = stats.boxcox_llf(lmbda, x)
llf2 = stats.boxcox_llf(lmbda, np.vstack([x, x]).T)
assert_allclose([llf, llf], llf2, rtol=1e-12)
def test_empty(self):
assert_(np.isnan(stats.boxcox_llf(1, [])))
class TestBoxcox(object):
def test_fixed_lmbda(self):
np.random.seed(12345)
x = stats.loggamma.rvs(5, size=50) + 5
xt = stats.boxcox(x, lmbda=1)
assert_allclose(xt, x - 1)
xt = stats.boxcox(x, lmbda=-1)
assert_allclose(xt, 1 - 1/x)
xt = stats.boxcox(x, lmbda=0)
assert_allclose(xt, np.log(x))
# Also test that array_like input works
xt = stats.boxcox(list(x), lmbda=0)
assert_allclose(xt, np.log(x))
def test_lmbda_None(self):
np.random.seed(1234567)
# Start from normal rv's, do inverse transform to check that
# optimization function gets close to the right answer.
np.random.seed(1245)
lmbda = 2.5
x = stats.norm.rvs(loc=10, size=50000)
x_inv = (x * lmbda + 1)**(-lmbda)
xt, maxlog = stats.boxcox(x_inv)
assert_almost_equal(maxlog, -1 / lmbda, decimal=2)
def test_alpha(self):
np.random.seed(1234)
x = stats.loggamma.rvs(5, size=50) + 5
# Some regular values for alpha, on a small sample size
_, _, interval = stats.boxcox(x, alpha=0.75)
assert_allclose(interval, [4.004485780226041, 5.138756355035744])
_, _, interval = stats.boxcox(x, alpha=0.05)
assert_allclose(interval, [1.2138178554857557, 8.209033272375663])
# Try some extreme values, see we don't hit the N=500 limit
x = stats.loggamma.rvs(7, size=500) + 15
_, _, interval = stats.boxcox(x, alpha=0.001)
assert_allclose(interval, [0.3988867, 11.40553131])
_, _, interval = stats.boxcox(x, alpha=0.999)
assert_allclose(interval, [5.83316246, 5.83735292])
def test_boxcox_bad_arg(self):
# Raise ValueError if any data value is negative.
x = np.array([-1])
assert_raises(ValueError, stats.boxcox, x)
def test_empty(self):
assert_(stats.boxcox([]).shape == (0,))
class TestBoxcoxNormmax(object):
def setup_method(self):
np.random.seed(12345)
self.x = stats.loggamma.rvs(5, size=50) + 5
def test_pearsonr(self):
maxlog = stats.boxcox_normmax(self.x)
assert_allclose(maxlog, 1.804465, rtol=1e-6)
def test_mle(self):
maxlog = stats.boxcox_normmax(self.x, method='mle')
assert_allclose(maxlog, 1.758101, rtol=1e-6)
# Check that boxcox() uses 'mle'
_, maxlog_boxcox = stats.boxcox(self.x)
assert_allclose(maxlog_boxcox, maxlog)
def test_all(self):
maxlog_all = stats.boxcox_normmax(self.x, method='all')
assert_allclose(maxlog_all, [1.804465, 1.758101], rtol=1e-6)
class TestBoxcoxNormplot(object):
def setup_method(self):
np.random.seed(7654321)
self.x = stats.loggamma.rvs(5, size=500) + 5
def test_basic(self):
N = 5
lmbdas, ppcc = stats.boxcox_normplot(self.x, -10, 10, N=N)
ppcc_expected = [0.57783375, 0.83610988, 0.97524311, 0.99756057,
0.95843297]
assert_allclose(lmbdas, np.linspace(-10, 10, num=N))
assert_allclose(ppcc, ppcc_expected)
@pytest.mark.skipif(not have_matplotlib, reason="no matplotlib")
def test_plot_kwarg(self):
# Check with the matplotlib.pyplot module
fig = plt.figure()
ax = fig.add_subplot(111)
stats.boxcox_normplot(self.x, -20, 20, plot=plt)
fig.delaxes(ax)
# Check that a Matplotlib Axes object is accepted
ax = fig.add_subplot(111)
stats.boxcox_normplot(self.x, -20, 20, plot=ax)
plt.close()
def test_invalid_inputs(self):
# `lb` has to be larger than `la`
assert_raises(ValueError, stats.boxcox_normplot, self.x, 1, 0)
# `x` can not contain negative values
assert_raises(ValueError, stats.boxcox_normplot, [-1, 1], 0, 1)
def test_empty(self):
assert_(stats.boxcox_normplot([], 0, 1).size == 0)
class TestCircFuncs(object):
def test_circfuncs(self):
x = np.array([355, 5, 2, 359, 10, 350])
M = stats.circmean(x, high=360)
Mval = 0.167690146
assert_allclose(M, Mval, rtol=1e-7)
V = stats.circvar(x, high=360)
Vval = 42.51955609
assert_allclose(V, Vval, rtol=1e-7)
S = stats.circstd(x, high=360)
Sval = 6.520702116
assert_allclose(S, Sval, rtol=1e-7)
def test_circfuncs_small(self):
x = np.array([20, 21, 22, 18, 19, 20.5, 19.2])
M1 = x.mean()
M2 = stats.circmean(x, high=360)
assert_allclose(M2, M1, rtol=1e-5)
V1 = x.var()
V2 = stats.circvar(x, high=360)
assert_allclose(V2, V1, rtol=1e-4)
S1 = x.std()
S2 = stats.circstd(x, high=360)
assert_allclose(S2, S1, rtol=1e-4)
def test_circmean_axis(self):
x = np.array([[355, 5, 2, 359, 10, 350],
[351, 7, 4, 352, 9, 349],
[357, 9, 8, 358, 4, 356]])
M1 = stats.circmean(x, high=360)
M2 = stats.circmean(x.ravel(), high=360)
assert_allclose(M1, M2, rtol=1e-14)
M1 = stats.circmean(x, high=360, axis=1)
M2 = [stats.circmean(x[i], high=360) for i in range(x.shape[0])]
assert_allclose(M1, M2, rtol=1e-14)
M1 = stats.circmean(x, high=360, axis=0)
M2 = [stats.circmean(x[:, i], high=360) for i in range(x.shape[1])]
assert_allclose(M1, M2, rtol=1e-14)
def test_circvar_axis(self):
x = np.array([[355, 5, 2, 359, 10, 350],
[351, 7, 4, 352, 9, 349],
[357, 9, 8, 358, 4, 356]])
V1 = stats.circvar(x, high=360)
V2 = stats.circvar(x.ravel(), high=360)
assert_allclose(V1, V2, rtol=1e-11)
V1 = stats.circvar(x, high=360, axis=1)
V2 = [stats.circvar(x[i], high=360) for i in range(x.shape[0])]
assert_allclose(V1, V2, rtol=1e-11)
V1 = stats.circvar(x, high=360, axis=0)
V2 = [stats.circvar(x[:, i], high=360) for i in range(x.shape[1])]
assert_allclose(V1, V2, rtol=1e-11)
def test_circstd_axis(self):
x = np.array([[355, 5, 2, 359, 10, 350],
[351, 7, 4, 352, 9, 349],
[357, 9, 8, 358, 4, 356]])
S1 = stats.circstd(x, high=360)
S2 = stats.circstd(x.ravel(), high=360)
assert_allclose(S1, S2, rtol=1e-11)
S1 = stats.circstd(x, high=360, axis=1)
S2 = [stats.circstd(x[i], high=360) for i in range(x.shape[0])]
assert_allclose(S1, S2, rtol=1e-11)
S1 = stats.circstd(x, high=360, axis=0)
S2 = [stats.circstd(x[:, i], high=360) for i in range(x.shape[1])]
assert_allclose(S1, S2, rtol=1e-11)
def test_circfuncs_array_like(self):
x = [355, 5, 2, 359, 10, 350]
assert_allclose(stats.circmean(x, high=360), 0.167690146, rtol=1e-7)
assert_allclose(stats.circvar(x, high=360), 42.51955609, rtol=1e-7)
assert_allclose(stats.circstd(x, high=360), 6.520702116, rtol=1e-7)
def test_empty(self):
assert_(np.isnan(stats.circmean([])))
assert_(np.isnan(stats.circstd([])))
assert_(np.isnan(stats.circvar([])))
def test_circmean_scalar(self):
x = 1.
M1 = x
M2 = stats.circmean(x)
assert_allclose(M2, M1, rtol=1e-5)
def test_circmean_range(self):
# regression test for gh-6420: circmean(..., high, low) must be
# between `high` and `low`
m = stats.circmean(np.arange(0, 2, 0.1), np.pi, -np.pi)
assert_(m < np.pi)
assert_(m > -np.pi)
def test_circfuncs_unit8(self):
# regression test for gh-7255: overflow when working with
# numpy uint8 data type
x = np.array([150, 10], dtype='uint8')
assert_equal(stats.circmean(x, high=180), 170.0)
assert_allclose(stats.circvar(x, high=180), 437.45871686, rtol=1e-7)
assert_allclose(stats.circstd(x, high=180), 20.91551378, rtol=1e-7)
def test_accuracy_wilcoxon():
freq = [1, 4, 16, 15, 8, 4, 5, 1, 2]
nums = range(-4, 5)
x = np.concatenate([[u] * v for u, v in zip(nums, freq)])
y = np.zeros(x.size)
T, p = stats.wilcoxon(x, y, "pratt")
assert_allclose(T, 423)
assert_allclose(p, 0.00197547303533107)
T, p = stats.wilcoxon(x, y, "zsplit")
assert_allclose(T, 441)
assert_allclose(p, 0.0032145343172473055)
T, p = stats.wilcoxon(x, y, "wilcox")
assert_allclose(T, 327)
assert_allclose(p, 0.00641346115861)
# Test the 'correction' option, using values computed in R with:
# > wilcox.test(x, y, paired=TRUE, exact=FALSE, correct={FALSE,TRUE})
x = np.array([120, 114, 181, 188, 180, 146, 121, 191, 132, 113, 127, 112])
y = np.array([133, 143, 119, 189, 112, 199, 198, 113, 115, 121, 142, 187])
T, p = stats.wilcoxon(x, y, correction=False)
assert_equal(T, 34)
assert_allclose(p, 0.6948866, rtol=1e-6)
T, p = stats.wilcoxon(x, y, correction=True)
assert_equal(T, 34)
assert_allclose(p, 0.7240817, rtol=1e-6)
def test_wilcoxon_result_attributes():
x = np.array([120, 114, 181, 188, 180, 146, 121, 191, 132, 113, 127, 112])
y = np.array([133, 143, 119, 189, 112, 199, 198, 113, 115, 121, 142, 187])
res = stats.wilcoxon(x, y, correction=False)
attributes = ('statistic', 'pvalue')
check_named_results(res, attributes)
def test_wilcoxon_tie():
# Regression test for gh-2391.
# Corresponding R code is:
# > result = wilcox.test(rep(0.1, 10), exact=FALSE, correct=FALSE)
# > result$p.value
# [1] 0.001565402
# > result = wilcox.test(rep(0.1, 10), exact=FALSE, correct=TRUE)
# > result$p.value
# [1] 0.001904195
stat, p = stats.wilcoxon([0.1] * 10)
expected_p = 0.001565402
assert_equal(stat, 0)
assert_allclose(p, expected_p, rtol=1e-6)
stat, p = stats.wilcoxon([0.1] * 10, correction=True)
expected_p = 0.001904195
assert_equal(stat, 0)
assert_allclose(p, expected_p, rtol=1e-6)
class TestMedianTest(object):
def test_bad_n_samples(self):
# median_test requires at least two samples.
assert_raises(ValueError, stats.median_test, [1, 2, 3])
def test_empty_sample(self):
# Each sample must contain at least one value.
assert_raises(ValueError, stats.median_test, [], [1, 2, 3])
def test_empty_when_ties_ignored(self):
# The grand median is 1, and all values in the first argument are
# equal to the grand median. With ties="ignore", those values are
# ignored, which results in the first sample being (in effect) empty.
# This should raise a ValueError.
assert_raises(ValueError, stats.median_test,
[1, 1, 1, 1], [2, 0, 1], [2, 0], ties="ignore")
def test_empty_contingency_row(self):
# The grand median is 1, and with the default ties="below", all the
# values in the samples are counted as being below the grand median.
# This would result a row of zeros in the contingency table, which is
# an error.
assert_raises(ValueError, stats.median_test, [1, 1, 1], [1, 1, 1])
# With ties="above", all the values are counted as above the
# grand median.
assert_raises(ValueError, stats.median_test, [1, 1, 1], [1, 1, 1],
ties="above")
def test_bad_ties(self):
assert_raises(ValueError, stats.median_test, [1, 2, 3], [4, 5],
ties="foo")
def test_bad_nan_policy(self):
assert_raises(ValueError, stats.median_test, [1, 2, 3], [4, 5], nan_policy='foobar')
def test_bad_keyword(self):
assert_raises(TypeError, stats.median_test, [1, 2, 3], [4, 5],
foo="foo")
def test_simple(self):
x = [1, 2, 3]
y = [1, 2, 3]
stat, p, med, tbl = stats.median_test(x, y)
# The median is floating point, but this equality test should be safe.
assert_equal(med, 2.0)
assert_array_equal(tbl, [[1, 1], [2, 2]])
# The expected values of the contingency table equal the contingency
# table, so the statistic should be 0 and the p-value should be 1.
assert_equal(stat, 0)
assert_equal(p, 1)
def test_ties_options(self):
# Test the contingency table calculation.
x = [1, 2, 3, 4]
y = [5, 6]
z = [7, 8, 9]
# grand median is 5.
# Default 'ties' option is "below".
stat, p, m, tbl = stats.median_test(x, y, z)
assert_equal(m, 5)
assert_equal(tbl, [[0, 1, 3], [4, 1, 0]])
stat, p, m, tbl = stats.median_test(x, y, z, ties="ignore")
assert_equal(m, 5)
assert_equal(tbl, [[0, 1, 3], [4, 0, 0]])
stat, p, m, tbl = stats.median_test(x, y, z, ties="above")
assert_equal(m, 5)
assert_equal(tbl, [[0, 2, 3], [4, 0, 0]])
def test_nan_policy_options(self):
x = [1, 2, np.nan]
y = [4, 5, 6]
mt1 = stats.median_test(x, y, nan_policy='propagate')
s, p, m, t = stats.median_test(x, y, nan_policy='omit')
assert_equal(mt1, (np.nan, np.nan, np.nan, None))
assert_allclose(s, 0.31250000000000006)
assert_allclose(p, 0.57615012203057869)
assert_equal(m, 4.0)
assert_equal(t, np.array([[0, 2],[2, 1]]))
assert_raises(ValueError, stats.median_test, x, y, nan_policy='raise')
def test_basic(self):
# median_test calls chi2_contingency to compute the test statistic
# and p-value. Make sure it hasn't screwed up the call...
x = [1, 2, 3, 4, 5]
y = [2, 4, 6, 8]
stat, p, m, tbl = stats.median_test(x, y)
assert_equal(m, 4)
assert_equal(tbl, [[1, 2], [4, 2]])
exp_stat, exp_p, dof, e = stats.chi2_contingency(tbl)
assert_allclose(stat, exp_stat)
assert_allclose(p, exp_p)
stat, p, m, tbl = stats.median_test(x, y, lambda_=0)
assert_equal(m, 4)
assert_equal(tbl, [[1, 2], [4, 2]])
exp_stat, exp_p, dof, e = stats.chi2_contingency(tbl, lambda_=0)
assert_allclose(stat, exp_stat)
assert_allclose(p, exp_p)
stat, p, m, tbl = stats.median_test(x, y, correction=False)
assert_equal(m, 4)
assert_equal(tbl, [[1, 2], [4, 2]])
exp_stat, exp_p, dof, e = stats.chi2_contingency(tbl, correction=False)
assert_allclose(stat, exp_stat)
assert_allclose(p, exp_p)
| gpl-3.0 | 5,094,190,005,229,645,000 | 37.915562 | 92 | 0.56443 | false |
ucloud/uai-sdk | examples/tensorflow/train/crnn_chinese/code/local_utils/tensorboard_vis_summary.py | 2 | 2460 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Time    : 18-2-28 4:49 PM
# @Author : Luo Yao
# @Site : http://icode.baidu.com/repos/baidu/personal-code/Luoyao
# @File : tensorboard_vis_summary.py
# @IDE: PyCharm Community Edition
"""
实现一些tensorboard卷积神经网络可视化工具
"""
import tensorflow as tf
class CNNVisualizer(object):
"""
"""
def __init__(self):
pass
@staticmethod
def merge_weights_hist(weights_tensor_dict, scope=None, is_merge=True):
"""
:param weights_tensor_dict:
:param scope:
:param is_merge:
:return:
"""
ret = dict()
with tf.variable_scope(scope):
for tensor_name, tensor in weights_tensor_dict.items():
ret[tensor_name] = tf.summary.histogram(name=tensor_name, values=tensor)
if is_merge:
tensor_list = []
for _, tensor in ret.items():
tensor_list.append(tensor)
return tf.summary.merge(inputs=tensor_list)
else:
return ret
@staticmethod
def merge_conv_image(feature_map, scope=None, max_batch=3):
"""
:param feature_map:
:param scope:
:param max_batch:
:return:
"""
with tf.variable_scope(scope):
tensor_shape = feature_map.get_shape().as_list()
chs = tensor_shape[-1]
range_stop = chs // 3
size_splits = [3 for _ in range(range_stop)]
if len(size_splits) * 3 < chs:
size_splits.append(chs % 3)
feature_map_split = tf.split(feature_map, num_or_size_splits=size_splits, axis=3)
feature_map_concats_1 = []
concat_step = len(feature_map_split) // 2
for i in range(0, concat_step, 2):
concat = tf.concat([feature_map_split[i], feature_map_split[i + 1]], axis=1)
feature_map_concats_1.append(concat)
feature_map_concats_2 = []
concat_step = len(feature_map_concats_1) // 2
for i in range(0, concat_step, 2):
concat = tf.concat([feature_map_concats_1[i], feature_map_concats_1[i + 1]], axis=2)
feature_map_concats_2.append(concat)
feature_map_concats = tf.concat(feature_map_concats_2, axis=0)
return tf.summary.image("image", feature_map_concats, max_batch)
| apache-2.0 | 1,608,053,443,314,635,300 | 30.921053 | 100 | 0.546991 | false |
mikewied/cbagent | cbagent/collectors/libstats/net.py | 1 | 1769 | from collections import defaultdict
from cbagent.collectors.libstats.remotestats import (RemoteStats,
multi_node_task,
single_node_task)
class NetStat(RemoteStats):
def __init__(self, *args, **kwargs):
super(NetStat, self).__init__(*args, **kwargs)
self.iface = self.detect_iface()
@single_node_task
def detect_iface(self):
for iface in ("eth5", "eth0", "em1"):
result = self.run("grep {} /proc/net/dev".format(iface),
warn_only=True, quiet=True)
if not result.return_code:
return iface
def get_dev_stats(self):
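        # Sample the interface counters in /proc/net/dev twice, one second
        # apart, and report the differences as per-second rates.  In the
        # whitespace-split counter line, field 0 is received bytes, field 1
        # received packets, field 8 transmitted bytes and field 9
        # transmitted packets.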
cmd = "grep {} /proc/net/dev".format(self.iface)
stdout = self.run("{0}; sleep 1; {0}".format(cmd))
s1, s2 = stdout.split('\n')
s1 = [int(v.split(":")[-1]) for v in s1.split() if v.split(":")[-1]]
s2 = [int(v.split(":")[-1]) for v in s2.split() if v.split(":")[-1]]
return {
"in_bytes_per_sec": s2[0] - s1[0],
"out_bytes_per_sec": s2[8] - s1[8],
"in_packets_per_sec": s2[1] - s1[1],
"out_packets_per_sec": s2[9] - s1[9],
}
def get_tcp_stats(self):
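        # Tally sockets in /proc/net/tcp by connection state (the fourth
        # whitespace-separated field, a hex code): 01 is ESTABLISHED and 06
        # is TIME_WAIT.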
stdout = self.run("cat /proc/net/tcp")
raw_data = defaultdict(int)
for conn in stdout.split("\n"):
state = conn.split()[3]
raw_data[state] += 1
return {
"ESTABLISHED": raw_data["01"],
"TIME_WAIT": raw_data["06"],
}
@multi_node_task
def get_samples(self):
dev_stats = self.get_dev_stats()
tcp_stats = self.get_tcp_stats()
return dict(dev_stats, **tcp_stats)
| apache-2.0 | -5,185,722,637,904,546,000 | 34.38 | 76 | 0.491238 | false |
engineer0x47/SCONS | build/lib/SCons/Tool/javah.py | 2 | 4613 | """SCons.Tool.javah
Tool-specific initialization for javah.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/javah.py 2014/08/24 12:12:31 garyo"
import os.path
import SCons.Action
import SCons.Builder
import SCons.Node.FS
import SCons.Tool.javac
import SCons.Util
def emit_java_headers(target, source, env):
"""Create and return lists of Java stub header files that will
be created from a set of class files.
"""
class_suffix = env.get('JAVACLASSSUFFIX', '.class')
classdir = env.get('JAVACLASSDIR')
if not classdir:
try:
s = source[0]
except IndexError:
classdir = '.'
else:
try:
classdir = s.attributes.java_classdir
except AttributeError:
classdir = '.'
classdir = env.Dir(classdir).rdir()
if str(classdir) == '.':
c_ = None
else:
c_ = str(classdir) + os.sep
slist = []
for src in source:
try:
classname = src.attributes.java_classname
except AttributeError:
classname = str(src)
if c_ and classname[:len(c_)] == c_:
classname = classname[len(c_):]
if class_suffix and classname[-len(class_suffix):] == class_suffix:
classname = classname[:-len(class_suffix)]
classname = SCons.Tool.javac.classname(classname)
s = src.rfile()
s.attributes.java_classname = classname
slist.append(s)
s = source[0].rfile()
if not hasattr(s.attributes, 'java_classdir'):
s.attributes.java_classdir = classdir
if target[0].__class__ is SCons.Node.FS.File:
tlist = target
else:
if not isinstance(target[0], SCons.Node.FS.Dir):
target[0].__class__ = SCons.Node.FS.Dir
target[0]._morph()
tlist = []
for s in source:
fname = s.attributes.java_classname.replace('.', '_') + '.h'
t = target[0].File(fname)
t.attributes.java_lookupdir = target[0]
tlist.append(t)
return tlist, source
def JavaHOutFlagGenerator(target, source, env, for_signature):
try:
t = target[0]
except (AttributeError, IndexError, TypeError):
t = target
try:
return '-d ' + str(t.attributes.java_lookupdir)
except AttributeError:
return '-o ' + str(t)
def getJavaHClassPath(env,target, source, for_signature):
path = "${SOURCE.attributes.java_classdir}"
if 'JAVACLASSPATH' in env and env['JAVACLASSPATH']:
path = SCons.Util.AppendPath(path, env['JAVACLASSPATH'])
return "-classpath %s" % (path)
def generate(env):
"""Add Builders and construction variables for javah to an Environment."""
java_javah = SCons.Tool.CreateJavaHBuilder(env)
java_javah.emitter = emit_java_headers
env['_JAVAHOUTFLAG'] = JavaHOutFlagGenerator
env['JAVAH'] = 'javah'
env['JAVAHFLAGS'] = SCons.Util.CLVar('')
env['_JAVAHCLASSPATH'] = getJavaHClassPath
env['JAVAHCOM'] = '$JAVAH $JAVAHFLAGS $_JAVAHOUTFLAG $_JAVAHCLASSPATH ${SOURCES.attributes.java_classname}'
env['JAVACLASSSUFFIX'] = '.class'
def exists(env):
return env.Detect('javah')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit | -8,302,697,187,911,299,000 | 32.671533 | 119 | 0.65077 | false |
anthgur/servo | tests/wpt/web-platform-tests/webdriver/tests/element_send_keys/scroll_into_view.py | 11 | 2790 | from tests.support.asserts import assert_success
from tests.support.fixtures import is_element_in_viewport
from tests.support.inline import inline
def send_keys_to_element(session, element, text):
return session.transport.send(
"POST",
"/session/{session_id}/element/{element_id}/value".format(
session_id=session.session_id,
element_id=element.id),
{"text": text})
def test_element_outside_of_not_scrollable_viewport(session):
session.url = inline("<input style=\"position: relative; left: -9999px;\">")
element = session.find.css("input", all=False)
response = send_keys_to_element(session, element, "foo")
assert_success(response)
assert not is_element_in_viewport(session, element)
def test_element_outside_of_scrollable_viewport(session):
session.url = inline("<input style=\"margin-top: 102vh;\">")
element = session.find.css("input", all=False)
response = send_keys_to_element(session, element, "foo")
assert_success(response)
assert is_element_in_viewport(session, element)
def test_option_select_container_outside_of_scrollable_viewport(session):
session.url = inline("""
<select style="margin-top: 102vh;">
<option value="foo">foo</option>
<option value="bar" id="bar">bar</option>
</select>
""")
element = session.find.css("option#bar", all=False)
select = session.find.css("select", all=False)
response = send_keys_to_element(session, element, "bar")
assert_success(response)
assert is_element_in_viewport(session, select)
assert is_element_in_viewport(session, element)
def test_option_stays_outside_of_scrollable_viewport(session):
session.url = inline("""
<select multiple style="height: 105vh; margin-top: 100vh;">
<option value="foo" id="foo" style="height: 100vh;">foo</option>
<option value="bar" id="bar" style="background-color: yellow;">bar</option>
</select>
""")
select = session.find.css("select", all=False)
option_foo = session.find.css("option#foo", all=False)
option_bar = session.find.css("option#bar", all=False)
response = send_keys_to_element(session, option_bar, "bar")
assert_success(response)
assert is_element_in_viewport(session, select)
assert is_element_in_viewport(session, option_foo)
assert not is_element_in_viewport(session, option_bar)
def test_contenteditable_element_outside_of_scrollable_viewport(session):
session.url = inline("<div contenteditable style=\"margin-top: 102vh;\"></div>")
element = session.find.css("div", all=False)
response = send_keys_to_element(session, element, "foo")
assert_success(response)
assert is_element_in_viewport(session, element)
| mpl-2.0 | 7,449,961,674,898,999,000 | 34.769231 | 85 | 0.67957 | false |
sk89q/Plumeria | plumeria/command/__init__.py | 1 | 1690 | import argparse
import logging
from functools import wraps
from plumeria.command.exception import *
from plumeria.command.manager import Command, Mapping, CommandManager, Context, CommandError
from plumeria.command.parse import Parser
from plumeria.event import bus
from plumeria.message import Response
from plumeria.transaction import tx_log
from plumeria.util.ratelimit import MessageTokenBucket, RateLimitExceeded
__all__ = ('ArgumentParser', 'commands', 'global_tokens')
logger = logging.getLogger(__name__)
class ArgumentParser(argparse.ArgumentParser):
def error(self, message):
raise ArgumentError(message)
commands = CommandManager(('+', '@', ';', '.', '!', '/'))
global_tokens = MessageTokenBucket(20, 12, 8, 6, fill_rate=0.25)
@bus.event("message")
async def on_message(message):
if commands.matches_command(message.content):
try:
global_tokens.consume(message)
except RateLimitExceeded as e:
logger.warning(str(e))
return
response = await commands.execute(message, Context(), direct=True)
if response:
if not len(response.content) and not len(response.attachments) and not response.embed:
response = Response("\N{WARNING SIGN} Command returned empty text as a response.")
tx_log.add_response(message, await message.respond(response))
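# Decorator restricting a command handler to server channels; raises
# CommandError when the command is invoked from a private message.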
def channel_only(f):
@wraps(f)
async def wrapper(message, *args, **kwargs):
if not message.channel.is_private:
return await f(message, *args, **kwargs)
else:
raise CommandError("You can only use this on a server, not in a private message")
return wrapper
| mit | -7,651,772,241,881,949,000 | 32.137255 | 98 | 0.689941 | false |
MikeStitt/simple-locating | test_pixels.py | 1 | 15468 | #!/usr/bin/python
#
# See https://github.com/MikeStitt/simple-locating/blob/master/license.txt for license.
import math
import cv2
import numpy as np
import scipy as Sci
import scipy.linalg
import where
pi = math.pi
debug_label = ''
debug_pos_err = ''
target_name = { where.UNKNOWN: 'UN', where.LOW: 'BT', where.MID_UNKNOWN: 'MU', where.MID_LEFT: 'ML', where.MID_RIGHT: 'MR', where.TOP: 'TP' }
# x(+is E) y(+ is Up) z(+ is N)
test_locs = {
'ml-ul' : np.array([-27.38-12.0, 61.0+20.0, 0]),
'ml-ll' : np.array([-27.38-12.0, 61.0+ 2.0, 0]),
'ml-ur' : np.array([-27.38+12.0, 61.0+20.0, 0]),
'ml-lr' : np.array([-27.38+12.0, 61.0+ 2.0, 0]),
'mr-ul' : np.array([+27.38-12.0, 61.0+20.0, 0]),
'mr-ll' : np.array([+27.38-12.0, 61.0+ 2.0, 0]),
'mr-ur' : np.array([+27.38+12.0, 61.0+20.0, 0]),
'mr-lr' : np.array([+27.38+12.0, 61.0+ 2.0, 0]),
'bt-ul' : np.array([ -12.0, 28.0+20.0, 0]),
'bt-ll' : np.array([ -12.0, 28.0+ 2.0, 0]),
'bt-ur' : np.array([ +12.0, 28.0+20.0, 0]),
'bt-lr' : np.array([ +12.0, 28.0+ 2.0, 0]),
'tp-ul' : np.array([ -12.0, 98.0+20.0, 0]),
'tp-ll' : np.array([ -12.0, 98.0+ 2.0, 0]),
'tp-ur' : np.array([ +12.0, 98.0+20.0, 0]),
'tp-lr' : np.array([ +12.0, 98.0+ 2.0, 0]) }
#
# See http://en.wikipedia.org/wiki/Euler%E2%80%93Rodrigues_parameters
#
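# rotation_matrix(axis, theta) returns the 3x3 matrix that rotates a vector
# by theta radians about the given axis (right-hand rule), built from the
# Euler-Rodrigues parameters.
#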
def rotation_matrix(axis,theta):
axis = axis/np.sqrt(np.dot(axis,axis))
a = np.cos(theta/2)
b,c,d = -axis*np.sin(theta/2)
return np.array([[a*a+b*b-c*c-d*d, 2*(b*c-a*d), 2*(b*d+a*c)],
[2*(b*c+a*d), a*a+c*c-b*b-d*d, 2*(c*d-a*b)],
[2*(b*d-a*c), 2*(c*d+a*b), a*a+d*d-b*b-c*c]])
v = np.array([3,5,4])
axis = np.array([0,0,1])
theta = pi
#focal length = d / ( 2 * tan ( angle_of_view / 2 ) )
fl = 320.0 / ( 2.0 * math.tan( math.radians(43.5)/2.0 ) )
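# For a 320-pixel-wide image with a 43.5 degree horizontal field of view this
# works out to roughly 401 pixels.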
cameraMatrix = np.array([ np.array([fl, 0, 160]),
np.array([0 , fl, 120]),
np.array([0 , 0, 1]) ])
distCoeff = np.float64([0,0,0,0])
def get_sides( ul, ll, ur, lr ):
# use ceil and floor to shorten boxes at partial pixels...
#
return [ float(math.ceil (min(ul[0],ll[0]))), # left
float(math.floor(max(ur[0],lr[0]))), # right
float(math.floor(max(ul[1],ur[1]))), # top
float(math.ceil (min(ll[1],lr[1]))) ] # bottom
def construct_test_image( az_rot, pitch_rot, pos_x, pos_y, pos_z ):
projected = {}
rectangles = []
y_axis = np.array([0,1,0])
az_rot_matrix = rotation_matrix(y_axis,az_rot)
x_axis = np.array([1,0,0])
el_rot_matrix = rotation_matrix(x_axis,pitch_rot)
sum_rot_matrix = np.dot(el_rot_matrix,az_rot_matrix)
for k, a in test_locs.iteritems():
p = cv2.projectPoints(np.array([a + [-pos_x,-pos_y,-pos_z]]), sum_rot_matrix, np.float64([0,0,0]), cameraMatrix, distCoeff)[0][0][0]
if ( 0 <= p[0] < 319 ) and ( 0 <= p[1] < 239 ):
projected[k] = p
if ('ml-ul' in projected) and ('ml-ll' in projected) and ('ml-ur' in projected) and ('ml-lr' in projected):
rectangles.append( get_sides( projected['ml-ul'], projected['ml-ll'], projected['ml-ur'], projected['ml-lr'] ) )
if ('mr-ul' in projected) and ('mr-ll' in projected) and ('mr-ur' in projected) and ('mr-lr' in projected):
rectangles.append( get_sides( projected['mr-ul'], projected['mr-ll'], projected['mr-ur'], projected['mr-lr'] ) )
if ('bt-ul' in projected) and ('bt-ll' in projected) and ('bt-ur' in projected) and ('bt-lr' in projected):
rectangles.append( get_sides( projected['bt-ul'], projected['bt-ll'], projected['bt-ur'], projected['bt-lr'] ) )
if ('tp-ul' in projected) and ('tp-ll' in projected) and ('tp-ur' in projected) and ('tp-lr' in projected):
rectangles.append( get_sides( projected['tp-ul'], projected['tp-ll'], projected['tp-ur'], projected['tp-lr'] ) )
return rectangles
def test_cases():
global debug_label
global debug_pos_err
f_csv = open( 'output.csv', 'w' )
printedHeader = 0
rms_clc_a_err = 0.0
rms_clc_r_err = 0.0
cnt = 0
for south in range (60, 241, 10):
for east in range (-60, +61, 10):
for t in (where.MID_LEFT, where.LOW, where.MID_RIGHT):
az = math.degrees(math.atan2( where.target_locs[t].center_east-east, south+15.0 ))
debug_label = 'az={0:6.1f}(deg) e={1:6.1f}(in) s={2:6.1f}(in)'.format(az, east, south)
#
# Step 0g
#
# Project the image on to the camera, identify the complete targets in the field of view
#
constructed_rectangles = construct_test_image(
math.radians(float(az)), # Rotate Right - (Azimuth) - radians
0.0, # Tilt Up - (Elevation) - radians
float(east), # Shift Right - (East) - inches
54.0, # Shift Up - (Up) - inches
float(-south) ) # Shift Forward- (North) - inches
#
# Start with an empty list of targets
targets = []
# Start with a empty csv and header list
header = []
csv = []
header.append( "south (in)" )
csv.append( south )
header.append( "east (in)" )
csv.append( east )
header.append( "az (deg)" )
csv.append( az )
#
# Perform Step 1 on all the target rectangles in the field of view
#
for r in constructed_rectangles:
# edges: left, right, top, bottom : in pixels
targets.append( where.target( r[0], r[1], r[2], r[3] ) )
header.append( "left (pix)" )
csv.append( r[0] )
header.append( "rght (pix)" )
csv.append( r[1] )
header.append( "top (pix)" )
csv.append( r[2] )
header.append( "bot (pix)" )
csv.append( r[3] )
for i in range( len(constructed_rectangles), 4 ):
header.append( "left (pix)" )
csv.append( -1 )
header.append( "rght (pix)" )
csv.append( -1 )
header.append( "top (pix)" )
csv.append( -1 )
header.append( "bot (pix)" )
csv.append( -1 )
# Perform Steps 2 through 12 on the target set of rectangles in the field of view
#
calc_az, calc_east, calc_south = where.where( targets )
for tgt in targets:
header.append( "left (rad)" )
csv.append( tgt.left_rad )
header.append( "rght (rad)" )
csv.append( tgt.right_rad )
header.append( "top (rad)" )
csv.append( tgt.top_rad )
header.append( "bot (rad)" )
csv.append( tgt.bottom_rad )
header.append( "ctr-az-rad" )
csv.append( tgt.azimuth_rad )
header.append( "ctr-el-rad" )
csv.append( tgt.elevation_rad )
header.append( "d_est_1-in" )
csv.append( tgt.dist_est_1 )
header.append( "h_est_1-in" )
csv.append( tgt.height_est_1 )
header.append( "level " )
csv.append( target_name[tgt.level] )
header.append( "position " )
csv.append( target_name[tgt.pos] )
for i in range( len(targets), 4 ):
header.append( "left (rad)" )
csv.append( "NaN" )
header.append( "rght (rad)" )
csv.append( "NaN" )
header.append( "top (rad)" )
csv.append( "NaN" )
header.append( "bot (rad)" )
csv.append( "NaN" )
header.append( "ctr-az-rad" )
csv.append( "NaN" )
header.append( "ctr-el-rad" )
csv.append( "NaN" )
header.append( "d_est_1-in" )
csv.append( "NaN" )
header.append( "h_est_1-in" )
csv.append( "NaN" )
header.append( "level " )
csv.append( target_name[where.UNKNOWN] )
header.append( "position " )
csv.append( target_name[where.UNKNOWN] )
header.append( "leftmost " )
csv.append( where.leftmost )
header.append( "rightmost " )
csv.append( where.rightmost )
header.append( "south1 " )
csv.append( where.south1 )
header.append( "east1 " )
csv.append( where.east1 )
header.append( "az1 " )
csv.append( where.az1 )
header.append( "south2 " )
csv.append( where.south2 )
header.append( "east2" )
csv.append( where.east2 )
header.append( "az2 " )
csv.append( where.az2 )
header.append( "south " )
csv.append( calc_south )
header.append( "east " )
csv.append( calc_east )
header.append( "az " )
csv.append( calc_az )
# calc_south = -1000 if we did not find two targets in the field of view
#
# if we found at least 2 targets in the camera field of view
if calc_south != -1000 :
debug_pos_err = 'heading-err={0:6.1f}(deg) east-err={1:6.1f}(in) south-err={2:6.1f}(in)'.format(
az-math.degrees(calc_az), calc_east-east, calc_south - south)
#
# Find the target we were aiming at in this test case
for r in targets:
if r.pos == t :
#
# Perform step 13
# Calculate the azimuth offset from the center of the backboard to the
# center of the hoop
calc_target_az, calc_az_offset = where.target_backboard_az_and_az_offset(
r, calc_east, calc_south )
header.append( "az-off-rad" )
csv.append( calc_az_offset )
#
# Perform step 14
# Calculate the range from the camera to the center of the hoop
calc_target_range = where.target_range( r, calc_east, calc_south )
header.append( "range (in)" )
csv.append( calc_target_range )
#
# Calculate the actual (ideal) azimuth offset and range assuming
# that we had no errors calculating where we were at and calculating
# our heading
#
actual_target_az, az_offset = where.target_backboard_az_and_az_offset(
r, east, south )
actual_target_range = where.target_range( r, east, south)
#
# Accumulate Root Mean Square (RMS) Heading and Range for this test run
#
cnt = cnt + 1
rms_clc_a_err = rms_clc_a_err + math.pow( calc_az_offset-az_offset,2 )
                            rms_clc_r_err = rms_clc_r_err + math.pow( calc_target_range-actual_target_range,2 )
print '{0:s} {1:s} in-view:{2:s} target:{3:s} az-err-to-hoop={4:4.1f}(deg) range-err-to-hoop={5:4.1f}(in)'.format(
debug_label, debug_pos_err, where.debug_found, target_name[r.pos],
math.degrees(calc_az_offset-az_offset), calc_target_range - actual_target_range)
if printedHeader == 0:
comma = ''
for i in header:
f_csv.write( '{0:s}{1:>15s}'.format( comma, i ) )
comma = ','
printedHeader = 1
f_csv.write( "\n" )
comma = ''
for i in csv:
if type(i) == str:
f_csv.write( '{0:s}{1:>15s}'.format( comma, i ) )
if type(i) == int:
f_csv.write( '{0:s}{1:>15d}'.format( comma, i ) )
if type(i) == float:
f_csv.write( '{0:s}{1:>15f}'.format( comma, i ) )
comma = ','
f_csv.write( "\n" )
else:
debug_pos_err = '---------------------------------------'
#
# Print the RMS errors
#
print 'rms_clc_r_err={0:10.7f} rms_clc_a_err={1:10.7f}'.format( math.sqrt(rms_clc_r_err/cnt), math.degrees(math.sqrt(rms_clc_a_err/cnt)) )
f_csv.close()
#
# Run the test cases
#
test_cases()
| mit | 4,963,036,012,235,682,000 | 48.736334 | 142 | 0.400116 | false |
aam-at/tensorflow | tensorflow/python/keras/layers/einsum_dense.py | 5 | 13533 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras-based einsum dense layer."""
# pylint: disable=g-classes-have-attributes
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import activations
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.ops import special_math_ops
from tensorflow.python.util.tf_export import keras_export
@keras_export("keras.layers.experimental.EinsumDense")
class EinsumDense(Layer):
"""A layer that uses tf.einsum as the backing computation.
This layer can perform einsum calculations of arbitrary dimensionality.
Arguments:
equation: An equation describing the einsum to perform. This equation must
be a valid einsum string of the form `ab,bc->ac`, `...ab,bc->...ac`, or
`ab...,bc->ac...` where 'ab', 'bc', and 'ac' can be any valid einsum axis
expression sequence.
output_shape: The expected shape of the output tensor (excluding the batch
dimension and any dimensions represented by ellipses). You can specify
None for any dimension that is unknown or can be inferred from the input
shape.
activation: Activation function to use. If you don't specify anything, no
activation is applied (that is, a "linear" activation: `a(x) = x`).
bias_axes: A string containing the output dimension(s) to apply a bias to.
Each character in the `bias_axes` string should correspond to a character
in the output portion of the `equation` string.
kernel_initializer: Initializer for the `kernel` weights matrix.
bias_initializer: Initializer for the bias vector.
kernel_regularizer: Regularizer function applied to the `kernel` weights
matrix.
bias_regularizer: Regularizer function applied to the bias vector.
activity_regularizer: Regularizer function applied to the output of the
      layer (its "activation").
kernel_constraint: Constraint function applied to the `kernel` weights
matrix.
bias_constraint: Constraint function applied to the bias vector.
Examples:
**Biased dense layer with einsums**
This example shows how to instantiate a standard Keras dense layer using
einsum operations. This example is equivalent to
`tf.keras.layers.Dense(64, use_bias=True)`.
>>> layer = EinsumDense("ab,bc->ac", output_shape=64, bias_axes="c")
>>> input_tensor = tf.keras.Input(shape=[32])
>>> output_tensor = layer(input_tensor)
>>> output_tensor
<... shape=(None, 64) dtype=...>
**Applying a dense layer to a sequence**
This example shows how to instantiate a layer that applies the same dense
operation to every element in a sequence. Here, the 'output_shape' has two
values (since there are two non-batch dimensions in the output); the first
dimension in the output_shape is `None`, because the sequence dimension `b`
has an unknown shape.
>>> layer = EinsumDense("abc,cd->abd",
... output_shape=(None, 64),
... bias_axes="d")
>>> input_tensor = tf.keras.Input(shape=[32, 128])
>>> output_tensor = layer(input_tensor)
>>> output_tensor
<... shape=(None, 32, 64) dtype=...>
**Applying a dense layer to a sequence using ellipses**
This example shows how to instantiate a layer that applies the same dense
operation to every element in a sequence, but uses the ellipsis notation
instead of specifying the batch and sequence dimensions.
Because we are using ellipsis notation and have specified only one axis, the
output_shape arg is a single value. When instantiated in this way, the layer
can handle any number of sequence dimensions - including the case where no
sequence dimension exists.
>>> layer = EinsumDense("...x,xy->...y", output_shape=64, bias_axes="y")
>>> input_tensor = tf.keras.Input(shape=[32, 128])
>>> output_tensor = layer(input_tensor)
>>> output_tensor
<... shape=(None, 32, 64) dtype=...>
"""
def __init__(self,
equation,
output_shape,
activation=None,
bias_axes=None,
kernel_initializer="glorot_uniform",
bias_initializer="zeros",
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
**kwargs):
super(EinsumDense, self).__init__(**kwargs)
self.equation = equation
if isinstance(output_shape, int):
self.partial_output_shape = [output_shape]
else:
self.partial_output_shape = list(output_shape)
self.bias_axes = bias_axes
self.activation = activations.get(activation)
self.kernel_initializer = initializers.get(kernel_initializer)
self.bias_initializer = initializers.get(bias_initializer)
self.kernel_regularizer = regularizers.get(kernel_regularizer)
self.bias_regularizer = regularizers.get(bias_regularizer)
self.kernel_constraint = constraints.get(kernel_constraint)
self.bias_constraint = constraints.get(bias_constraint)
def build(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape)
shape_data = _analyze_einsum_string(self.equation,
self.bias_axes,
input_shape,
self.partial_output_shape)
kernel_shape, bias_shape, self.full_output_shape = shape_data
self.kernel = self.add_weight(
"kernel",
shape=kernel_shape,
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint,
dtype=self.dtype,
trainable=True)
if bias_shape is not None:
self.bias = self.add_weight(
"bias",
shape=bias_shape,
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint,
dtype=self.dtype,
trainable=True)
else:
self.bias = None
super(EinsumDense, self).build(input_shape)
def compute_output_shape(self, _):
return tensor_shape.TensorShape(self.full_output_shape)
def get_config(self):
config = {
"output_shape":
self.partial_output_shape,
"equation":
self.equation,
"activation":
activations.serialize(self.activation),
"bias_axes":
self.bias_axes,
"kernel_initializer":
initializers.serialize(self.kernel_initializer),
"bias_initializer":
initializers.serialize(self.bias_initializer),
"kernel_regularizer":
regularizers.serialize(self.kernel_regularizer),
"bias_regularizer":
regularizers.serialize(self.bias_regularizer),
"activity_regularizer":
regularizers.serialize(self.activity_regularizer),
"kernel_constraint":
constraints.serialize(self.kernel_constraint),
"bias_constraint":
constraints.serialize(self.bias_constraint),
}
base_config = super(EinsumDense, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
def call(self, inputs):
ret = special_math_ops.einsum(self.equation, inputs, self.kernel)
if self.bias is not None:
ret += self.bias
if self.activation is not None:
ret = self.activation(ret)
return ret
def _analyze_einsum_string(equation, bias_axes, input_shape, output_shape):
"""Analyzes an einsum string to determine the required weight shape."""
dot_replaced_string = re.sub(r"\.\.\.", "0", equation)
# This is the case where no ellipses are present in the string.
split_string = re.match("([a-zA-Z]+),([a-zA-Z]+)->([a-zA-Z]+)",
dot_replaced_string)
if split_string:
return _analyze_split_string(split_string, bias_axes, input_shape,
output_shape)
# This is the case where ellipses are present on the left.
split_string = re.match("0([a-zA-Z]+),([a-zA-Z]+)->0([a-zA-Z]+)",
dot_replaced_string)
if split_string:
return _analyze_split_string(
split_string, bias_axes, input_shape, output_shape, left_elided=True)
# This is the case where ellipses are present on the right.
split_string = re.match("([a-zA-Z]{2,})0,([a-zA-Z]+)->([a-zA-Z]+)0",
dot_replaced_string)
if split_string:
return _analyze_split_string(split_string, bias_axes, input_shape,
output_shape)
raise ValueError(
"Invalid einsum equation '%s'. Equations must be in the form "
"[X],[Y]->[Z], ...[X],[Y]->...[Z], or [X]...,[Y]->[Z]...." % equation)
def _analyze_split_string(split_string,
bias_axes,
input_shape,
output_shape,
left_elided=False):
"""Analyze an pre-split einsum string to find the weight shape."""
input_spec = split_string.group(1)
weight_spec = split_string.group(2)
output_spec = split_string.group(3)
elided = len(input_shape) - len(input_spec)
if isinstance(output_shape, int):
output_shape = [output_shape]
else:
output_shape = list(output_shape)
output_shape.insert(0, input_shape[0])
if elided > 0 and left_elided:
for i in range(1, elided):
# We already inserted the 0th input dimension at dim 0, so we need to
# start at location 1 here.
output_shape.insert(1, input_shape[i])
elif elided > 0 and not left_elided:
for i in range(len(input_shape) - elided, len(input_shape)):
output_shape.append(input_shape[i])
if left_elided:
# If we have beginning dimensions elided, we need to use negative indexing
# to determine where in the input dimension our values are.
input_dim_map = {
dim: (i + elided) - len(input_shape) for i, dim in enumerate(input_spec)
}
# Because we've constructed the full output shape already, we don't need
# to do negative indexing.
output_dim_map = {dim: (i + elided) for i, dim in enumerate(output_spec)}
else:
input_dim_map = {dim: i for i, dim in enumerate(input_spec)}
output_dim_map = {dim: i for i, dim in enumerate(output_spec)}
for i, dim in enumerate(input_spec):
input_shape_at_dim = input_shape[i]
if dim in output_dim_map:
output_shape_at_dim = output_shape[output_dim_map[dim]]
if (output_shape_at_dim is not None and
output_shape_at_dim != input_shape_at_dim):
raise ValueError(
"Input shape and output shape do not match at shared "
"dimension '%s'. Input shape is %s, and output shape "
"is %s." %
(dim, input_shape_at_dim, output_shape[output_dim_map[dim]]))
for dim in output_spec:
if dim not in input_spec and dim not in weight_spec:
raise ValueError("Dimension '%s' was specified in the output '%s' but "
"has no corresponding dim in the input spec '%s' or "
"weight spec '%s.'" % (dim, output_spec, input_spec,
output_spec))
weight_shape = []
for dim in weight_spec:
if dim in input_dim_map:
weight_shape.append(input_shape[input_dim_map[dim]])
elif dim in output_dim_map:
weight_shape.append(output_shape[output_dim_map[dim]])
else:
raise ValueError("Weight dimension '%s' did not have a match in either "
"the input spec '%s' or the output spec '%s'. For this "
"layer, the weight must be fully specified." %
(dim, input_spec, output_spec))
if bias_axes is not None:
num_left_elided = elided if left_elided else 0
idx_map = {
char: output_shape[i + num_left_elided]
for i, char in enumerate(output_spec)
}
for char in bias_axes:
if char not in output_spec:
raise ValueError("Bias dimension '%s' was requested, but is not a part "
"of the output specification '%s'" %
(char, output_spec))
first_bias_location = min([output_spec.find(char) for char in bias_axes])
bias_output_spec = output_spec[first_bias_location:]
bias_shape = [
idx_map[char] if char in bias_axes else 1 for char in bias_output_spec
]
if not left_elided:
for _ in range(elided):
bias_shape.append(1)
else:
bias_shape = None
return weight_shape, bias_shape, output_shape
| apache-2.0 | -6,643,406,939,624,919,000 | 39.15727 | 80 | 0.640656 | false |
TathagataChakraborti/resource-conflicts | PLANROB-2015/py2.5/lib/python2.5/bsddb/dbtables.py | 6 | 25629 | #-----------------------------------------------------------------------
#
# Copyright (C) 2000, 2001 by Autonomous Zone Industries
# Copyright (C) 2002 Gregory P. Smith
#
# License: This is free software. You may use this software for any
# purpose including modification/redistribution, so long as
# this header remains intact and that you do not claim any
# rights of ownership or authorship of this software. This
# software has been tested, but no warranty is expressed or
# implied.
#
# -- Gregory P. Smith <[email protected]>
# This provides a simple database table interface built on top of
# the Python BerkeleyDB 3 interface.
#
_cvsid = '$Id: dbtables.py 58760 2007-11-01 21:22:40Z gregory.p.smith $'
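#
# Illustrative usage sketch (not part of the original module; the file and
# directory names are hypothetical and the dbhome directory must already
# exist):
#
#   db = bsdTableDB(filename='tables.db', dbhome='/tmp/dbhome', create=1)
#   db.CreateTable('people', ['name', 'email'])
#   db.Insert('people', {'name': 'Alice', 'email': '[email protected]'})
#   rows = db.Select('people', ['email'],
#                    conditions={'name': ExactCond('Alice')})
#   db.close()
#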
import re
import sys
import copy
import struct
import random
from types import ListType, StringType
import cPickle as pickle
try:
# For Pythons w/distutils pybsddb
from bsddb3.db import *
except ImportError:
# For Python 2.3
from bsddb.db import *
# XXX(nnorwitz): is this correct? DBIncompleteError is conditional in _bsddb.c
try:
DBIncompleteError
except NameError:
class DBIncompleteError(Exception):
pass
class TableDBError(StandardError):
pass
class TableAlreadyExists(TableDBError):
pass
class Cond:
"""This condition matches everything"""
def __call__(self, s):
return 1
class ExactCond(Cond):
"""Acts as an exact match condition function"""
def __init__(self, strtomatch):
self.strtomatch = strtomatch
def __call__(self, s):
return s == self.strtomatch
class PrefixCond(Cond):
"""Acts as a condition function for matching a string prefix"""
def __init__(self, prefix):
self.prefix = prefix
def __call__(self, s):
return s[:len(self.prefix)] == self.prefix
class PostfixCond(Cond):
"""Acts as a condition function for matching a string postfix"""
def __init__(self, postfix):
self.postfix = postfix
def __call__(self, s):
return s[-len(self.postfix):] == self.postfix
class LikeCond(Cond):
"""
Acts as a function that will match using an SQL 'LIKE' style
string. Case insensitive and % signs are wild cards.
This isn't perfect but it should work for the simple common cases.
"""
def __init__(self, likestr, re_flags=re.IGNORECASE):
# escape python re characters
chars_to_escape = '.*+()[]?'
for char in chars_to_escape :
likestr = likestr.replace(char, '\\'+char)
# convert %s to wildcards
self.likestr = likestr.replace('%', '.*')
self.re = re.compile('^'+self.likestr+'$', re_flags)
def __call__(self, s):
return self.re.match(s)
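# Example (illustrative): LikeCond('%app%') matches 'Apple' and 'pineapple'
# (case-insensitively) but not 'grape'.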
#
# keys used to store database metadata
#
_table_names_key = '__TABLE_NAMES__' # list of the tables in this db
_columns = '._COLUMNS__' # table_name+this key contains a list of columns
def _columns_key(table):
return table + _columns
#
# these keys are found within table sub databases
#
_data = '._DATA_.' # this+column+this+rowid key contains table data
_rowid = '._ROWID_.' # this+rowid+this key contains a unique entry for each
# row in the table. (no data is stored)
_rowid_str_len = 8 # length in bytes of the unique rowid strings
def _data_key(table, col, rowid):
return table + _data + col + _data + rowid
def _search_col_data_key(table, col):
return table + _data + col + _data
def _search_all_data_key(table):
return table + _data
def _rowid_key(table, rowid):
return table + _rowid + rowid + _rowid
def _search_rowid_key(table):
return table + _rowid
def contains_metastrings(s) :
"""Verify that the given string does not contain any
metadata strings that might interfere with dbtables database operation.
"""
if (s.find(_table_names_key) >= 0 or
s.find(_columns) >= 0 or
s.find(_data) >= 0 or
s.find(_rowid) >= 0):
# Then
return 1
else:
return 0
class bsdTableDB :
def __init__(self, filename, dbhome, create=0, truncate=0, mode=0600,
recover=0, dbflags=0):
"""bsdTableDB(filename, dbhome, create=0, truncate=0, mode=0600)
Open database name in the dbhome BerkeleyDB directory.
Use keyword arguments when calling this constructor.
"""
self.db = None
myflags = DB_THREAD
if create:
myflags |= DB_CREATE
flagsforenv = (DB_INIT_MPOOL | DB_INIT_LOCK | DB_INIT_LOG |
DB_INIT_TXN | dbflags)
# DB_AUTO_COMMIT isn't a valid flag for env.open()
try:
dbflags |= DB_AUTO_COMMIT
except AttributeError:
pass
if recover:
flagsforenv = flagsforenv | DB_RECOVER
self.env = DBEnv()
# enable auto deadlock avoidance
self.env.set_lk_detect(DB_LOCK_DEFAULT)
self.env.open(dbhome, myflags | flagsforenv)
if truncate:
myflags |= DB_TRUNCATE
self.db = DB(self.env)
# this code relies on DBCursor.set* methods to raise exceptions
# rather than returning None
self.db.set_get_returns_none(1)
# allow duplicate entries [warning: be careful w/ metadata]
self.db.set_flags(DB_DUP)
self.db.open(filename, DB_BTREE, dbflags | myflags, mode)
self.dbfilename = filename
# Initialize the table names list if this is a new database
txn = self.env.txn_begin()
try:
if not self.db.has_key(_table_names_key, txn):
self.db.put(_table_names_key, pickle.dumps([], 1), txn=txn)
# Yes, bare except
except:
txn.abort()
raise
else:
txn.commit()
# TODO verify more of the database's metadata?
self.__tablecolumns = {}
def __del__(self):
self.close()
def close(self):
if self.db is not None:
self.db.close()
self.db = None
if self.env is not None:
self.env.close()
self.env = None
def checkpoint(self, mins=0):
try:
self.env.txn_checkpoint(mins)
except DBIncompleteError:
pass
def sync(self):
try:
self.db.sync()
except DBIncompleteError:
pass
def _db_print(self) :
"""Print the database to stdout for debugging"""
print "******** Printing raw database for debugging ********"
cur = self.db.cursor()
try:
key, data = cur.first()
while 1:
print repr({key: data})
next = cur.next()
if next:
key, data = next
else:
cur.close()
return
except DBNotFoundError:
cur.close()
def CreateTable(self, table, columns):
"""CreateTable(table, columns) - Create a new table in the database.
raises TableDBError if it already exists or for other DB errors.
"""
assert isinstance(columns, ListType)
txn = None
try:
# checking sanity of the table and column names here on
# table creation will prevent problems elsewhere.
if contains_metastrings(table):
raise ValueError(
"bad table name: contains reserved metastrings")
for column in columns :
if contains_metastrings(column):
raise ValueError(
"bad column name: contains reserved metastrings")
columnlist_key = _columns_key(table)
if self.db.has_key(columnlist_key):
raise TableAlreadyExists, "table already exists"
txn = self.env.txn_begin()
# store the table's column info
self.db.put(columnlist_key, pickle.dumps(columns, 1), txn=txn)
# add the table name to the tablelist
tablelist = pickle.loads(self.db.get(_table_names_key, txn=txn,
flags=DB_RMW))
tablelist.append(table)
# delete 1st, in case we opened with DB_DUP
self.db.delete(_table_names_key, txn)
self.db.put(_table_names_key, pickle.dumps(tablelist, 1), txn=txn)
txn.commit()
txn = None
except DBError, dberror:
if txn:
txn.abort()
raise TableDBError, dberror[1]
def ListTableColumns(self, table):
"""Return a list of columns in the given table.
[] if the table doesn't exist.
"""
assert isinstance(table, StringType)
if contains_metastrings(table):
raise ValueError, "bad table name: contains reserved metastrings"
columnlist_key = _columns_key(table)
if not self.db.has_key(columnlist_key):
return []
pickledcolumnlist = self.db.get(columnlist_key)
if pickledcolumnlist:
return pickle.loads(pickledcolumnlist)
else:
return []
def ListTables(self):
"""Return a list of tables in this database."""
pickledtablelist = self.db.get(_table_names_key)
if pickledtablelist:
return pickle.loads(pickledtablelist)
else:
return []
def CreateOrExtendTable(self, table, columns):
"""CreateOrExtendTable(table, columns)
Create a new table in the database.
If a table of this name already exists, extend it to have any
additional columns present in the given list as well as
all of its current columns.
"""
assert isinstance(columns, ListType)
try:
self.CreateTable(table, columns)
except TableAlreadyExists:
# the table already existed, add any new columns
txn = None
try:
columnlist_key = _columns_key(table)
txn = self.env.txn_begin()
# load the current column list
oldcolumnlist = pickle.loads(
self.db.get(columnlist_key, txn=txn, flags=DB_RMW))
# create a hash table for fast lookups of column names in the
# loop below
oldcolumnhash = {}
for c in oldcolumnlist:
oldcolumnhash[c] = c
# create a new column list containing both the old and new
# column names
newcolumnlist = copy.copy(oldcolumnlist)
for c in columns:
if not oldcolumnhash.has_key(c):
newcolumnlist.append(c)
# store the table's new extended column list
if newcolumnlist != oldcolumnlist :
# delete the old one first since we opened with DB_DUP
self.db.delete(columnlist_key, txn)
self.db.put(columnlist_key,
pickle.dumps(newcolumnlist, 1),
txn=txn)
txn.commit()
txn = None
self.__load_column_info(table)
except DBError, dberror:
if txn:
txn.abort()
raise TableDBError, dberror[1]
def __load_column_info(self, table) :
"""initialize the self.__tablecolumns dict"""
# check the column names
try:
tcolpickles = self.db.get(_columns_key(table))
except DBNotFoundError:
raise TableDBError, "unknown table: %r" % (table,)
if not tcolpickles:
raise TableDBError, "unknown table: %r" % (table,)
self.__tablecolumns[table] = pickle.loads(tcolpickles)
def __new_rowid(self, table, txn) :
"""Create a new unique row identifier"""
unique = 0
while not unique:
# Generate a random 64-bit row ID string
# (note: might have <64 bits of true randomness
# but it's plenty for our database id needs!)
blist = []
for x in xrange(_rowid_str_len):
blist.append(random.randint(0,255))
newid = struct.pack('B'*_rowid_str_len, *blist)
# Guarantee uniqueness by adding this key to the database
try:
self.db.put(_rowid_key(table, newid), None, txn=txn,
flags=DB_NOOVERWRITE)
except DBKeyExistError:
pass
else:
unique = 1
return newid
def Insert(self, table, rowdict) :
"""Insert(table, datadict) - Insert a new row into the table
using the keys+values from rowdict as the column values.
"""
txn = None
try:
if not self.db.has_key(_columns_key(table)):
raise TableDBError, "unknown table"
# check the validity of each column name
if not self.__tablecolumns.has_key(table):
self.__load_column_info(table)
for column in rowdict.keys() :
if not self.__tablecolumns[table].count(column):
raise TableDBError, "unknown column: %r" % (column,)
# get a unique row identifier for this row
txn = self.env.txn_begin()
rowid = self.__new_rowid(table, txn=txn)
# insert the row values into the table database
for column, dataitem in rowdict.items():
# store the value
self.db.put(_data_key(table, column, rowid), dataitem, txn=txn)
txn.commit()
txn = None
except DBError, dberror:
# WIBNI we could just abort the txn and re-raise the exception?
# But no, because TableDBError is not related to DBError via
# inheritance, so it would be backwards incompatible. Do the next
# best thing.
info = sys.exc_info()
if txn:
txn.abort()
self.db.delete(_rowid_key(table, rowid))
raise TableDBError, dberror[1], info[2]
def Modify(self, table, conditions={}, mappings={}):
"""Modify(table, conditions={}, mappings={}) - Modify items in rows matching 'conditions' using mapping functions in 'mappings'
* table - the table name
* conditions - a dictionary keyed on column names containing
a condition callable expecting the data string as an
argument and returning a boolean.
        * mappings - a dictionary keyed on column names containing a
          mapping callable expecting the data string as an argument and
returning the new string for that column.
"""
try:
matching_rowids = self.__Select(table, [], conditions)
# modify only requested columns
columns = mappings.keys()
for rowid in matching_rowids.keys():
txn = None
try:
                    txn = self.env.txn_begin()
                    for column in columns:
# modify the requested column
try:
dataitem = self.db.get(
_data_key(table, column, rowid),
txn=txn)
self.db.delete(
_data_key(table, column, rowid),
txn)
except DBNotFoundError:
# XXXXXXX row key somehow didn't exist, assume no
# error
dataitem = None
dataitem = mappings[column](dataitem)
                        if dataitem is not None:
self.db.put(
_data_key(table, column, rowid),
dataitem, txn=txn)
txn.commit()
txn = None
# catch all exceptions here since we call unknown callables
except:
if txn:
txn.abort()
raise
except DBError, dberror:
raise TableDBError, dberror[1]
def Delete(self, table, conditions={}):
"""Delete(table, conditions) - Delete items matching the given
conditions from the table.
* conditions - a dictionary keyed on column names containing
condition functions expecting the data string as an
argument and returning a boolean.
"""
try:
matching_rowids = self.__Select(table, [], conditions)
# delete row data from all columns
columns = self.__tablecolumns[table]
for rowid in matching_rowids.keys():
txn = None
try:
txn = self.env.txn_begin()
for column in columns:
# delete the data key
try:
self.db.delete(_data_key(table, column, rowid),
txn)
except DBNotFoundError:
# XXXXXXX column may not exist, assume no error
pass
try:
self.db.delete(_rowid_key(table, rowid), txn)
except DBNotFoundError:
# XXXXXXX row key somehow didn't exist, assume no error
pass
txn.commit()
txn = None
except DBError, dberror:
if txn:
txn.abort()
raise
except DBError, dberror:
raise TableDBError, dberror[1]
def Select(self, table, columns, conditions={}):
"""Select(table, columns, conditions) - retrieve specific row data
Returns a list of row column->value mapping dictionaries.
* columns - a list of which column data to return. If
columns is None, all columns will be returned.
* conditions - a dictionary keyed on column names
containing callable conditions expecting the data string as an
argument and returning a boolean.
"""
try:
if not self.__tablecolumns.has_key(table):
self.__load_column_info(table)
if columns is None:
columns = self.__tablecolumns[table]
matching_rowids = self.__Select(table, columns, conditions)
except DBError, dberror:
raise TableDBError, dberror[1]
# return the matches as a list of dictionaries
return matching_rowids.values()
def __Select(self, table, columns, conditions):
"""__Select() - Used to implement Select and Delete (above)
Returns a dictionary keyed on rowids containing dicts
holding the row data for columns listed in the columns param
that match the given conditions.
* conditions is a dictionary keyed on column names
containing callable conditions expecting the data string as an
argument and returning a boolean.
"""
# check the validity of each column name
if not self.__tablecolumns.has_key(table):
self.__load_column_info(table)
if columns is None:
            columns = self.__tablecolumns[table]
for column in (columns + conditions.keys()):
if not self.__tablecolumns[table].count(column):
raise TableDBError, "unknown column: %r" % (column,)
        # keyed on rows that match so far, containing dicts keyed on
# column names containing the data for that row and column.
matching_rowids = {}
# keys are rowids that do not match
rejected_rowids = {}
# attempt to sort the conditions in such a way as to minimize full
# column lookups
def cmp_conditions(atuple, btuple):
a = atuple[1]
b = btuple[1]
if type(a) is type(b):
if isinstance(a, PrefixCond) and isinstance(b, PrefixCond):
# longest prefix first
return cmp(len(b.prefix), len(a.prefix))
if isinstance(a, LikeCond) and isinstance(b, LikeCond):
# longest likestr first
return cmp(len(b.likestr), len(a.likestr))
return 0
if isinstance(a, ExactCond):
return -1
if isinstance(b, ExactCond):
return 1
if isinstance(a, PrefixCond):
return -1
if isinstance(b, PrefixCond):
return 1
# leave all unknown condition callables alone as equals
return 0
conditionlist = conditions.items()
conditionlist.sort(cmp_conditions)
# Apply conditions to column data to find what we want
cur = self.db.cursor()
column_num = -1
for column, condition in conditionlist:
column_num = column_num + 1
searchkey = _search_col_data_key(table, column)
# speedup: don't linear search columns within loop
if column in columns:
savethiscolumndata = 1 # save the data for return
else:
savethiscolumndata = 0 # data only used for selection
try:
key, data = cur.set_range(searchkey)
while key[:len(searchkey)] == searchkey:
# extract the rowid from the key
rowid = key[-_rowid_str_len:]
if not rejected_rowids.has_key(rowid):
# if no condition was specified or the condition
# succeeds, add row to our match list.
if not condition or condition(data):
if not matching_rowids.has_key(rowid):
matching_rowids[rowid] = {}
if savethiscolumndata:
matching_rowids[rowid][column] = data
else:
if matching_rowids.has_key(rowid):
del matching_rowids[rowid]
rejected_rowids[rowid] = rowid
key, data = cur.next()
except DBError, dberror:
if dberror[0] != DB_NOTFOUND:
raise
continue
cur.close()
# we're done selecting rows, garbage collect the reject list
del rejected_rowids
# extract any remaining desired column data from the
# database for the matching rows.
if len(columns) > 0:
for rowid, rowdata in matching_rowids.items():
for column in columns:
if rowdata.has_key(column):
continue
try:
rowdata[column] = self.db.get(
_data_key(table, column, rowid))
except DBError, dberror:
if dberror[0] != DB_NOTFOUND:
raise
rowdata[column] = None
# return the matches
return matching_rowids
def Drop(self, table):
"""Remove an entire table from the database"""
txn = None
try:
txn = self.env.txn_begin()
# delete the column list
self.db.delete(_columns_key(table), txn)
cur = self.db.cursor(txn)
# delete all keys containing this tables column and row info
table_key = _search_all_data_key(table)
while 1:
try:
key, data = cur.set_range(table_key)
except DBNotFoundError:
break
# only delete items in this table
if key[:len(table_key)] != table_key:
break
cur.delete()
# delete all rowids used by this table
table_key = _search_rowid_key(table)
while 1:
try:
key, data = cur.set_range(table_key)
except DBNotFoundError:
break
# only delete items in this table
if key[:len(table_key)] != table_key:
break
cur.delete()
cur.close()
# delete the tablename from the table name list
tablelist = pickle.loads(
self.db.get(_table_names_key, txn=txn, flags=DB_RMW))
try:
tablelist.remove(table)
except ValueError:
# hmm, it wasn't there, oh well, that's what we want.
pass
            # delete 1st, in case we opened with DB_DUP
self.db.delete(_table_names_key, txn)
self.db.put(_table_names_key, pickle.dumps(tablelist, 1), txn=txn)
txn.commit()
txn = None
if self.__tablecolumns.has_key(table):
del self.__tablecolumns[table]
except DBError, dberror:
if txn:
txn.abort()
raise TableDBError, dberror[1]
| mit | -5,376,170,521,990,108,000 | 35.3017 | 135 | 0.534004 | false |
itsjeyd/edx-platform | common/test/acceptance/tests/lms/test_certificate_web_view.py | 10 | 9797 | """
Acceptance tests for the certificate web view feature.
"""
from common.test.acceptance.tests.helpers import UniqueCourseTest, EventsTestMixin, load_data_str, get_element_padding
from nose.plugins.attrib import attr
from common.test.acceptance.fixtures.course import CourseFixture, XBlockFixtureDesc, CourseUpdateDesc
from common.test.acceptance.fixtures.certificates import CertificateConfigFixture
from common.test.acceptance.pages.lms.auto_auth import AutoAuthPage
from common.test.acceptance.pages.lms.certificate_page import CertificatePage
from common.test.acceptance.pages.lms.course_info import CourseInfoPage
from common.test.acceptance.pages.lms.tab_nav import TabNavPage
from common.test.acceptance.pages.lms.course_nav import CourseNavPage
from common.test.acceptance.pages.lms.progress import ProgressPage
@attr(shard=5)
class CertificateWebViewTest(EventsTestMixin, UniqueCourseTest):
"""
Tests for verifying certificate web view features
"""
def setUp(self):
super(CertificateWebViewTest, self).setUp()
# set same course number as we have in fixture json
self.course_info['number'] = "335535897951379478207964576572017930000"
test_certificate_config = {
'id': 1,
'name': 'Certificate name',
'description': 'Certificate description',
'course_title': 'Course title override',
'signatories': [],
'version': 1,
'is_active': True
}
course_settings = {'certificates': test_certificate_config}
self.course_fixture = CourseFixture(
self.course_info["org"],
self.course_info["number"],
self.course_info["run"],
self.course_info["display_name"],
settings=course_settings
)
self.course_fixture.add_advanced_settings({
"cert_html_view_enabled": {"value": "true"}
})
self.course_fixture.install()
self.user_id = "99" # we have created a user with this id in fixture
self.cert_fixture = CertificateConfigFixture(self.course_id, test_certificate_config)
# Load certificate web view page for use by the tests
self.certificate_page = CertificatePage(self.browser, self.user_id, self.course_id)
def log_in_as_unique_user(self):
"""
Log in as a valid lms user.
"""
AutoAuthPage(
self.browser,
username="testcert",
email="[email protected]",
password="testuser",
course_id=self.course_id
).visit()
def test_page_has_accomplishments_banner(self):
"""
        Scenario: The user accomplishment banner should be present if the logged-in user is the one who
        was awarded the certificate
        Given there is a course with a certificate configuration
        And I have passed the course and a certificate is generated
        When I view the certificate web view page
        Then I should see the accomplishment banner, and the banner should have LinkedIn and Facebook share buttons
        And when I click the `Add to Profile` button, an `edx.certificate.shared` event should be emitted
"""
self.cert_fixture.install()
self.log_in_as_unique_user()
self.certificate_page.visit()
self.assertTrue(self.certificate_page.accomplishment_banner.visible)
self.assertTrue(self.certificate_page.add_to_linkedin_profile_button.visible)
self.assertTrue(self.certificate_page.add_to_facebook_profile_button.visible)
self.certificate_page.add_to_linkedin_profile_button.click()
actual_events = self.wait_for_events(
event_filter={'event_type': 'edx.certificate.shared'},
number_of_matches=1
)
expected_events = [
{
'event': {
'user_id': self.user_id,
'course_id': self.course_id
}
}
]
self.assert_events_match(expected_events, actual_events)
@attr(shard=5)
class CertificateProgressPageTest(UniqueCourseTest):
"""
Tests for verifying Certificate info on Progress tab of course page.
"""
def setUp(self):
super(CertificateProgressPageTest, self).setUp()
# set same course number as we have in fixture json
self.course_info['number'] = "3355358979513794782079645765720179311111"
test_certificate_config = {
'id': 1,
'name': 'Certificate name',
'description': 'Certificate description',
'course_title': 'Course title override',
'signatories': [],
'version': 1,
'is_active': True
}
course_settings = {'certificates': test_certificate_config}
self.course_fixture = CourseFixture(
self.course_info["org"],
self.course_info["number"],
self.course_info["run"],
self.course_info["display_name"],
settings=course_settings
)
self.course_fixture.add_advanced_settings({
"cert_html_view_enabled": {"value": "true"}
})
self.course_fixture.add_update(
CourseUpdateDesc(date='January 29, 2014', content='Test course update1')
)
self.course_fixture.add_children(
XBlockFixtureDesc('static_tab', 'Test Static Tab'),
XBlockFixtureDesc('chapter', 'Test Section').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection', grader_type='Final Exam').add_children(
XBlockFixtureDesc('problem', 'Test Problem 1', data=load_data_str('multiple_choice.xml')),
XBlockFixtureDesc('html', 'Test HTML'),
)
),
XBlockFixtureDesc('chapter', 'Test Section 2').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection 2', grader_type='Midterm Exam').add_children(
XBlockFixtureDesc('problem', 'Test Problem 2', data=load_data_str('formula_problem.xml')),
)
)
)
self.course_fixture.install()
self.user_id = "99" # we have created a user with this id in fixture
self.cert_fixture = CertificateConfigFixture(self.course_id, test_certificate_config)
self.course_info_page = CourseInfoPage(self.browser, self.course_id)
self.progress_page = ProgressPage(self.browser, self.course_id)
self.course_nav = CourseNavPage(self.browser)
self.tab_nav = TabNavPage(self.browser)
def log_in_as_unique_user(self):
"""
Log in as a valid lms user.
"""
AutoAuthPage(
self.browser,
username="testprogress",
email="[email protected]",
password="testuser",
course_id=self.course_id
).visit()
def test_progress_page_has_view_certificate_button(self):
"""
        Scenario: The 'View Certificate' option should be present on the course Progress tab if the user is
        awarded a certificate.
        And there should be no padding around the box containing certificate info. (See SOL-1196 for details on this)
        As a Student
        Given there is a course with a certificate configuration
        And I have passed the course and a certificate is generated
        When I go to the Progress tab for the course
        Then I should see a 'View Certificate' button
        And there should be no padding around the certificate info box.
"""
self.cert_fixture.install()
self.log_in_as_unique_user()
self.complete_course_problems()
self.course_info_page.visit()
self.tab_nav.go_to_tab('Progress')
self.assertTrue(self.progress_page.q(css='.auto-cert-message').first.visible)
actual_padding = get_element_padding(self.progress_page, '.wrapper-msg.wrapper-auto-cert')
actual_padding = [int(padding) for padding in actual_padding.itervalues()]
expected_padding = [0, 0, 0, 0]
        # Verify that there is no padding around the box containing certificate info.
self.assertEqual(actual_padding, expected_padding)
def complete_course_problems(self):
"""
Complete Course Problems.
Problems were added in the setUp
"""
self.course_info_page.visit()
self.tab_nav.go_to_tab('Course')
# Navigate to Test Subsection in Test Section Section
self.course_nav.go_to_section('Test Section', 'Test Subsection')
# Navigate to Test Problem 1
self.course_nav.go_to_vertical('Test Problem 1')
        # Select the correct value from the select menu
self.course_nav.q(css='select option[value="{}"]'.format('blue')).first.click()
# Select correct radio button for the answer
self.course_nav.q(css='fieldset div.field:nth-child(4) input').nth(0).click()
# Select correct radio buttons for the answer
self.course_nav.q(css='fieldset div.field:nth-child(2) input').nth(1).click()
self.course_nav.q(css='fieldset div.field:nth-child(4) input').nth(1).click()
# Submit the answer
self.course_nav.q(css='button.submit').click()
self.course_nav.wait_for_ajax()
# Navigate to the 'Test Subsection 2' of 'Test Section 2'
self.course_nav.go_to_section('Test Section 2', 'Test Subsection 2')
# Navigate to Test Problem 2
self.course_nav.go_to_vertical('Test Problem 2')
# Fill in the answer of the problem
self.course_nav.q(css='input[id^=input_][id$=_2_1]').fill('A*x^2 + sqrt(y)')
# Submit the answer
self.course_nav.q(css='button.submit').click()
self.course_nav.wait_for_ajax()
| agpl-3.0 | -7,193,028,522,696,737,000 | 39.483471 | 118 | 0.631214 | false |
enthought/traitsgui | enthought/pyface/workbench/traits_ui_view.py | 1 | 3122 | """ A view whose content is provided by a traits UI. """
# Standard library imports.
import logging
# Enthought library imports.
from enthought.traits.api import Any, Instance, Str
from enthought.traits.ui.api import UI
# Local imports.
from view import View
# Logging.
logger = logging.getLogger(__name__)
class TraitsUIView(View):
""" A view whose content is provided by a traits UI. """
#### 'TraitsUIView' interface #############################################
    # The object that we provide a traits UI of (this defaults to the view
    # itself, i.e. 'self').
obj = Any
# The traits UI that represents the view.
#
# The framework sets this to the value returned by 'create_ui'.
ui = Instance(UI)
# The name of the traits UI view used to create the UI (if not specified,
# the default traits UI view is used).
view = Str
###########################################################################
# 'IWorkbenchPart' interface.
###########################################################################
#### Trait initializers ###################################################
def _name_default(self):
""" Trait initializer. """
return str(self.obj)
#### Methods ##############################################################
def create_control(self, parent):
""" Creates the toolkit-specific control that represents the editor.
'parent' is the toolkit-specific control that is the editor's parent.
Overridden to call 'create_ui' to get the traits UI.
"""
self.ui = self.create_ui(parent)
return self.ui.control
def destroy_control(self):
""" Destroys the toolkit-specific control that represents the editor.
Overridden to call 'dispose' on the traits UI.
"""
# Give the traits UI a chance to clean itself up.
if self.ui is not None:
logger.debug('disposing traits UI for view [%s]', self)
self.ui.dispose()
self.ui = None
# Break reference to the control, so the view is created afresh
# next time.
self.control = None
return
###########################################################################
# 'TraitsUIView' interface.
###########################################################################
#### Trait initializers ###################################################
def _obj_default(self):
""" Trait initializer. """
return self
#### Methods ##############################################################
def create_ui(self, parent):
""" Creates the traits UI that represents the editor.
By default it calls 'edit_traits' on the view's 'model'. If you
want more control over the creation of the traits UI then override!
"""
ui = self.obj.edit_traits(
parent=parent, view=self.view, kind='subpanel'
)
return ui
#### EOF ######################################################################
| bsd-3-clause | 8,619,045,688,183,192,000 | 27.907407 | 79 | 0.483344 | false |
Intel-tensorflow/tensorflow | tensorflow/python/data/experimental/ops/cardinality.py | 14 | 4688 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Cardinality analysis of `Dataset` objects."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import gen_dataset_ops
from tensorflow.python.ops import gen_experimental_dataset_ops as ged_ops
from tensorflow.python.util.tf_export import tf_export
INFINITE = -1
UNKNOWN = -2
tf_export("data.experimental.INFINITE_CARDINALITY").export_constant(
__name__, "INFINITE")
tf_export("data.experimental.UNKNOWN_CARDINALITY").export_constant(
__name__, "UNKNOWN")
# TODO(b/157691652): Deprecate this method after migrating users to the new API.
@tf_export("data.experimental.cardinality")
def cardinality(dataset):
"""Returns the cardinality of `dataset`, if known.
The operation returns the cardinality of `dataset`. The operation may return
`tf.data.experimental.INFINITE_CARDINALITY` if `dataset` contains an infinite
number of elements or `tf.data.experimental.UNKNOWN_CARDINALITY` if the
analysis fails to determine the number of elements in `dataset` (e.g. when the
dataset source is a file).
>>> dataset = tf.data.Dataset.range(42)
>>> print(tf.data.experimental.cardinality(dataset).numpy())
42
>>> dataset = dataset.repeat()
>>> cardinality = tf.data.experimental.cardinality(dataset)
>>> print((cardinality == tf.data.experimental.INFINITE_CARDINALITY).numpy())
True
>>> dataset = dataset.filter(lambda x: True)
>>> cardinality = tf.data.experimental.cardinality(dataset)
>>> print((cardinality == tf.data.experimental.UNKNOWN_CARDINALITY).numpy())
True
Args:
dataset: A `tf.data.Dataset` for which to determine cardinality.
Returns:
A scalar `tf.int64` `Tensor` representing the cardinality of `dataset`. If
the cardinality is infinite or unknown, the operation returns the named
constant `INFINITE_CARDINALITY` and `UNKNOWN_CARDINALITY` respectively.
"""
return gen_dataset_ops.dataset_cardinality(dataset._variant_tensor) # pylint: disable=protected-access
@tf_export("data.experimental.assert_cardinality")
def assert_cardinality(expected_cardinality):
"""Asserts the cardinality of the input dataset.
NOTE: The following assumes that "examples.tfrecord" contains 42 records.
>>> dataset = tf.data.TFRecordDataset("examples.tfrecord")
>>> cardinality = tf.data.experimental.cardinality(dataset)
>>> print((cardinality == tf.data.experimental.UNKNOWN_CARDINALITY).numpy())
True
>>> dataset = dataset.apply(tf.data.experimental.assert_cardinality(42))
>>> print(tf.data.experimental.cardinality(dataset).numpy())
42
Args:
expected_cardinality: The expected cardinality of the input dataset.
Returns:
A `Dataset` transformation function, which can be passed to
`tf.data.Dataset.apply`.
Raises:
FailedPreconditionError: The assertion is checked at runtime (when iterating
the dataset) and an error is raised if the actual and expected cardinality
differ.
"""
def _apply_fn(dataset):
return _AssertCardinalityDataset(dataset, expected_cardinality)
return _apply_fn
class _AssertCardinalityDataset(dataset_ops.UnaryUnchangedStructureDataset):
"""A `Dataset` that assert the cardinality of its input."""
def __init__(self, input_dataset, expected_cardinality):
self._input_dataset = input_dataset
self._expected_cardinality = ops.convert_to_tensor(
expected_cardinality, dtype=dtypes.int64, name="expected_cardinality")
# pylint: enable=protected-access
variant_tensor = ged_ops.assert_cardinality_dataset(
self._input_dataset._variant_tensor, # pylint: disable=protected-access
self._expected_cardinality,
**self._flat_structure)
super(_AssertCardinalityDataset, self).__init__(input_dataset,
variant_tensor)
| apache-2.0 | 5,984,968,972,292,914,000 | 39.068376 | 105 | 0.725683 | false |
erinspace/osf.io | osf_tests/test_handlers.py | 16 | 1470 | import pytest
from nose.tools import assert_raises
from framework.celery_tasks import handlers
from website.project.tasks import on_node_updated
class TestCeleryHandlers:
@pytest.fixture()
def queue(self):
return handlers.queue()
def test_get_task_from_queue_not_there(self):
task = handlers.get_task_from_queue(
'website.project.tasks.on_node_updated',
predicate=lambda task: task.kwargs['node_id'] == 'woop'
)
assert task is False
def test_get_task_from_queue(self, queue):
handlers.queue().append(
on_node_updated.s(node_id='woop', user_id='heyyo', first_save=False, saved_fields={'contributors'})
)
task = handlers.get_task_from_queue(
'website.project.tasks.on_node_updated',
predicate=lambda task: task.kwargs['node_id'] == 'woop'
)
assert task
def test_get_task_from_queue_errors_with_two_tasks(self, queue):
tasks = [
on_node_updated.s(node_id='woop', user_id='heyyo', first_save=False, saved_fields={'title'}),
on_node_updated.s(node_id='woop', user_id='heyyo', first_save=False, saved_fields={'contributors'})
]
queue += tasks
with assert_raises(ValueError):
handlers.get_task_from_queue(
'website.project.tasks.on_node_updated',
predicate=lambda task: task.kwargs['node_id'] == 'woop'
)
| apache-2.0 | -6,787,971,714,275,002,000 | 34 | 111 | 0.609524 | false |
stephane-martin/salt-debian-packaging | salt-2016.3.3/salt/fileserver/azurefs.py | 2 | 9454 | # -*- coding: utf-8 -*-
'''
The backend for serving files from the Azure blob storage service.
To enable, add ``azurefs`` to the :conf_master:`fileserver_backend` option in
the Master config file.
.. code-block:: yaml
fileserver_backend:
- azurefs
Each environment is configured as a storage container. The name of the container
must match the name of the environment. The ``storage_account`` is the name of
the storage account inside Azure where the container lives, and the
``storage_key`` is the access key used for that storage account:
.. code-block:: yaml
azurefs_envs:
base:
storage_account: my_storage
storage_key: frehgfw34fWGegG07fwsfw343tGFDSDGDFGD==
With this configuration, multiple storage accounts can be used with a single
salt infrastructure.
'''
# Import python libs
from __future__ import absolute_import
import os
import os.path
import logging
import time
try:
import fcntl
HAS_FCNTL = True
except ImportError:
# fcntl is not available on windows
HAS_FCNTL = False
# Import salt libs
import salt.fileserver
import salt.utils
import salt.syspaths
try:
import salt.utils.msazure as azure
HAS_AZURE = True
except ImportError:
HAS_AZURE = False
__virtualname__ = 'azurefs'
log = logging.getLogger()
def __virtual__():
'''
    Only load if azurefs is enabled as a fileserver backend and the Azure libraries are available
'''
if __virtualname__ not in __opts__['fileserver_backend']:
return False
if not HAS_AZURE:
return False
return True
def find_file(path, saltenv='base', env=None, **kwargs):
'''
Search the environment for the relative path
'''
if env is not None:
salt.utils.warn_until(
'Carbon',
'Passing a salt environment should be done using \'saltenv\' '
'not \'env\'. This functionality will be removed in Salt Carbon.'
)
# Backwards compatibility
saltenv = env
fnd = {'path': '',
'rel': ''}
try:
root = os.path.join(salt.syspaths.CACHE_DIR, 'azure')
except IndexError:
# An invalid index was passed
return fnd
except ValueError:
# An invalid index option was passed
return fnd
full = os.path.join(root, path)
if os.path.isfile(full) and not salt.fileserver.is_file_ignored(
__opts__, full):
fnd['path'] = full
fnd['rel'] = path
fnd['stat'] = list(os.stat(full))
return fnd
def envs():
'''
Treat each container as an environment
'''
containers = __opts__.get('azurefs_containers', [])
return containers.keys()
def serve_file(load, fnd):
'''
Return a chunk from a file based on the data received
'''
if 'env' in load:
salt.utils.warn_until(
'Carbon',
'Passing a salt environment should be done using \'saltenv\' '
'not \'env\'. This functionality will be removed in Salt Carbon.'
)
load['saltenv'] = load.pop('env')
ret = {'data': '',
'dest': ''}
if 'path' not in load or 'loc' not in load or 'saltenv' not in load:
return ret
if not fnd['path']:
return ret
ret['dest'] = fnd['rel']
gzip = load.get('gzip', None)
with salt.utils.fopen(fnd['path'], 'rb') as fp_:
fp_.seek(load['loc'])
data = fp_.read(__opts__['file_buffer_size'])
if gzip and data:
data = salt.utils.gzip_util.compress(data, gzip)
ret['gzip'] = gzip
ret['data'] = data
return ret
def update():
'''
    When we are asked to update (at the regular interval), reap the cache
'''
base_dir = os.path.join(salt.syspaths.CACHE_DIR, 'azure')
if not os.path.isdir(base_dir):
os.makedirs(base_dir)
try:
salt.fileserver.reap_fileserver_cache_dir(
os.path.join(base_dir, 'hash'),
find_file
)
except (IOError, OSError):
# Hash file won't exist if no files have yet been served up
pass
data_dict = {}
if os.listdir(base_dir):
# Find out what the latest file is, so that we only update files more
# recent than that, and not the entire filesystem
all_files = []
for root, subFolders, files in os.walk(base_dir):
for fn_ in files:
full_path = os.path.join(root, fn_)
all_files.append([
os.path.getmtime(full_path),
full_path,
])
if all_files:
all_files.sort()
all_files.reverse()
latest_stamp = os.path.getmtime(all_files[0][1])
format_stamp = time.strftime(
'%Y-%m-%d %H:%M:%S', time.localtime(latest_stamp)
)
#data_dict={'sysparm_query': 'sys_updated_on > {0}'.format(format_stamp)}
# Pull in any files that have changed
envs = __opts__.get('azurefs_envs', [])
for env in envs:
storage_conn = azure.get_storage_conn(opts=envs[env])
result = azure.list_blobs(
storage_conn=storage_conn,
container=env,
)
# Write out any new files to disk
for blob in result:
file_name = os.path.join(base_dir, blob)
# Make sure the directory exists first
comps = file_name.split('/')
file_path = '/'.join(comps[:-1])
if not os.path.exists(file_path):
os.makedirs(file_path)
# Write out the file
azure.get_blob(
storage_conn=storage_conn,
container=env,
name=blob,
local_path=file_name,
)
time_stamp = time.mktime(
time.strptime(
result[blob]['properties']['last_modified'][0],
'%a, %d %b %Y %H:%M:%S %Z'
),
)
os.utime(file_name, (time_stamp, time_stamp))
def file_hash(load, fnd):
'''
Return a file hash, the hash type is set in the master config file
'''
path = fnd['path']
ret = {}
# if the file doesn't exist, we can't get a hash
if not path or not os.path.isfile(path):
return ret
# set the hash_type as it is determined by config
# -- so mechanism won't change that
ret['hash_type'] = __opts__['hash_type']
# check if the hash is cached
# cache file's contents should be 'hash:mtime'
cache_path = os.path.join(salt.syspaths.CACHE_DIR,
'azure/hash',
load['saltenv'],
'{0}.hash.{1}'.format(
fnd['rel'], __opts__['hash_type'])
)
# if we have a cache, serve that if the mtime hasn't changed
if os.path.exists(cache_path):
try:
with salt.utils.fopen(cache_path, 'rb') as fp_:
try:
hsum, mtime = fp_.read().split(':')
except ValueError:
log.debug(
                        'Fileserver attempted to read '
                        'incomplete cache file. Retrying.'
)
file_hash(load, fnd)
return ret
                if str(os.path.getmtime(path)) == mtime:
                    # mtime unchanged, so the cached hash is still valid
ret['hsum'] = hsum
return ret
except os.error:
            # Reading the cache file failed (it may be locked); retry below
log.debug(
                'Fileserver encountered lock '
                'when reading cache file. Retrying.'
)
file_hash(load, fnd)
return ret
# if we don't have a cache entry-- lets make one
ret['hsum'] = salt.utils.get_hash(path, __opts__['hash_type'])
cache_dir = os.path.dirname(cache_path)
# make cache directory if it doesn't exist
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
# save the cache object 'hash:mtime'
if HAS_FCNTL:
with salt.utils.flopen(cache_path, 'w') as fp_:
fp_.write('{0}:{1}'.format(ret['hsum'], os.path.getmtime(path)))
fcntl.flock(fp_.fileno(), fcntl.LOCK_UN)
return ret
else:
with salt.utils.fopen(cache_path, 'w') as fp_:
fp_.write('{0}:{1}'.format(ret['hsum'], os.path.getmtime(path)))
return ret
def file_list(load):
'''
Return a list of all files on the file server in a specified environment
'''
ret = []
envs = __opts__.get('azurefs_envs', [])
storage_conn = azure.get_storage_conn(opts=envs[load['saltenv']])
result = azure.list_blobs(
storage_conn=storage_conn,
container=load['saltenv'],
)
for blob in result:
ret.append(blob)
return ret
def dir_list(load):
'''
Return a list of all directories on the master
'''
ret = []
envs = __opts__.get('azurefs_envs', [])
storage_conn = azure.get_storage_conn(opts=envs[load['saltenv']])
result = azure.list_blobs(
storage_conn=storage_conn,
container=load['saltenv'],
)
for blob in result:
if '/' not in blob:
continue
comps = blob.split('/')
path = '/'.join(comps[:-1])
if path not in ret:
ret.append(path)
return ret
| apache-2.0 | 7,943,245,060,333,341,000 | 28.54375 | 81 | 0.546541 | false |
ntuecon/server | pyenv/Lib/site-packages/win32/lib/win32cryptcon.py | 3 | 75734 | # Generated by h2py from WinCrypt.h
def GET_ALG_CLASS(x): return (x & (7 << 13))
def GET_ALG_TYPE(x): return (x & (15 << 9))
def GET_ALG_SID(x): return (x & (511))
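# The three helpers above decompose a CryptoAPI ALG_ID: the algorithm class
# occupies bits 13-15 (mask 7 << 13), the type bits 9-12 (mask 15 << 9) and the
# sub-identifier (SID) bits 0-8 (mask 511).  The CALG_* values defined below
# are composed by OR-ing one constant from each of those three groups.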
ALG_CLASS_ANY = (0)
ALG_CLASS_SIGNATURE = (1 << 13)
ALG_CLASS_MSG_ENCRYPT = (2 << 13)
ALG_CLASS_DATA_ENCRYPT = (3 << 13)
ALG_CLASS_HASH = (4 << 13)
ALG_CLASS_KEY_EXCHANGE = (5 << 13)
ALG_CLASS_ALL = (7 << 13)
ALG_TYPE_ANY = (0)
ALG_TYPE_DSS = (1 << 9)
ALG_TYPE_RSA = (2 << 9)
ALG_TYPE_BLOCK = (3 << 9)
ALG_TYPE_STREAM = (4 << 9)
ALG_TYPE_DH = (5 << 9)
ALG_TYPE_SECURECHANNEL = (6 << 9)
ALG_SID_ANY = (0)
ALG_SID_RSA_ANY = 0
ALG_SID_RSA_PKCS = 1
ALG_SID_RSA_MSATWORK = 2
ALG_SID_RSA_ENTRUST = 3
ALG_SID_RSA_PGP = 4
ALG_SID_DSS_ANY = 0
ALG_SID_DSS_PKCS = 1
ALG_SID_DSS_DMS = 2
ALG_SID_DES = 1
ALG_SID_3DES = 3
ALG_SID_DESX = 4
ALG_SID_IDEA = 5
ALG_SID_CAST = 6
ALG_SID_SAFERSK64 = 7
ALG_SID_SAFERSK128 = 8
ALG_SID_3DES_112 = 9
ALG_SID_CYLINK_MEK = 12
ALG_SID_RC5 = 13
ALG_SID_AES_128 = 14
ALG_SID_AES_192 = 15
ALG_SID_AES_256 = 16
ALG_SID_AES = 17
ALG_SID_SKIPJACK = 10
ALG_SID_TEK = 11
CRYPT_MODE_CBCI = 6
CRYPT_MODE_CFBP = 7
CRYPT_MODE_OFBP = 8
CRYPT_MODE_CBCOFM = 9
CRYPT_MODE_CBCOFMI = 10
ALG_SID_RC2 = 2
ALG_SID_RC4 = 1
ALG_SID_SEAL = 2
ALG_SID_DH_SANDF = 1
ALG_SID_DH_EPHEM = 2
ALG_SID_AGREED_KEY_ANY = 3
ALG_SID_KEA = 4
ALG_SID_MD2 = 1
ALG_SID_MD4 = 2
ALG_SID_MD5 = 3
ALG_SID_SHA = 4
ALG_SID_SHA1 = 4
ALG_SID_MAC = 5
ALG_SID_RIPEMD = 6
ALG_SID_RIPEMD160 = 7
ALG_SID_SSL3SHAMD5 = 8
ALG_SID_HMAC = 9
ALG_SID_TLS1PRF = 10
ALG_SID_HASH_REPLACE_OWF = 11
ALG_SID_SHA_256 = 12
ALG_SID_SHA_384 = 13
ALG_SID_SHA_512 = 14
ALG_SID_SSL3_MASTER = 1
ALG_SID_SCHANNEL_MASTER_HASH = 2
ALG_SID_SCHANNEL_MAC_KEY = 3
ALG_SID_PCT1_MASTER = 4
ALG_SID_SSL2_MASTER = 5
ALG_SID_TLS1_MASTER = 6
ALG_SID_SCHANNEL_ENC_KEY = 7
ALG_SID_EXAMPLE = 80
CALG_MD2 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MD2)
CALG_MD4 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MD4)
CALG_MD5 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MD5)
CALG_SHA = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA)
CALG_SHA1 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA1)
CALG_MAC = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MAC)
CALG_RSA_SIGN = (ALG_CLASS_SIGNATURE | ALG_TYPE_RSA | ALG_SID_RSA_ANY)
CALG_DSS_SIGN = (ALG_CLASS_SIGNATURE | ALG_TYPE_DSS | ALG_SID_DSS_ANY)
CALG_NO_SIGN = (ALG_CLASS_SIGNATURE | ALG_TYPE_ANY | ALG_SID_ANY)
CALG_RSA_KEYX = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_RSA|ALG_SID_RSA_ANY)
CALG_DES = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_DES)
CALG_3DES_112 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_3DES_112)
CALG_3DES = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_3DES)
CALG_DESX = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_DESX)
CALG_RC2 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_RC2)
CALG_RC4 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_STREAM|ALG_SID_RC4)
CALG_SEAL = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_STREAM|ALG_SID_SEAL)
CALG_DH_SF = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_DH|ALG_SID_DH_SANDF)
CALG_DH_EPHEM = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_DH|ALG_SID_DH_EPHEM)
CALG_AGREEDKEY_ANY = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_DH|ALG_SID_AGREED_KEY_ANY)
CALG_KEA_KEYX = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_DH|ALG_SID_KEA)
CALG_HUGHES_MD5 = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_ANY|ALG_SID_MD5)
CALG_SKIPJACK = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_SKIPJACK)
CALG_TEK = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_TEK)
CALG_CYLINK_MEK = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_CYLINK_MEK)
CALG_SSL3_SHAMD5 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SSL3SHAMD5)
CALG_SSL3_MASTER = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SSL3_MASTER)
CALG_SCHANNEL_MASTER_HASH = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SCHANNEL_MASTER_HASH)
CALG_SCHANNEL_MAC_KEY = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SCHANNEL_MAC_KEY)
CALG_SCHANNEL_ENC_KEY = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SCHANNEL_ENC_KEY)
CALG_PCT1_MASTER = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_PCT1_MASTER)
CALG_SSL2_MASTER = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SSL2_MASTER)
CALG_TLS1_MASTER = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_TLS1_MASTER)
CALG_RC5 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_RC5)
CALG_HMAC = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_HMAC)
CALG_TLS1PRF = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_TLS1PRF)
CALG_HASH_REPLACE_OWF = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_HASH_REPLACE_OWF)
CALG_AES_128 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_AES_128)
CALG_AES_192 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_AES_192)
CALG_AES_256 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_AES_256)
CALG_AES = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_AES)
CALG_SHA_256 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA_256)
CALG_SHA_384 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA_384)
CALG_SHA_512 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA_512)
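# Illustrative example (not part of the generated header): a composed CALG_*
# identifier can be split back into its fields with the GET_ALG_* helpers, e.g.
#   GET_ALG_CLASS(CALG_SHA1)    == ALG_CLASS_HASH
#   GET_ALG_TYPE(CALG_RSA_KEYX) == ALG_TYPE_RSA
#   GET_ALG_SID(CALG_AES_256)   == ALG_SID_AES_256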
CRYPT_VERIFYCONTEXT = (-268435456)
CRYPT_NEWKEYSET = 0x00000008
CRYPT_DELETEKEYSET = 0x00000010
CRYPT_MACHINE_KEYSET = 0x00000020
CRYPT_SILENT = 0x00000040
CRYPT_EXPORTABLE = 0x00000001
CRYPT_USER_PROTECTED = 0x00000002
CRYPT_CREATE_SALT = 0x00000004
CRYPT_UPDATE_KEY = 0x00000008
CRYPT_NO_SALT = 0x00000010
CRYPT_PREGEN = 0x00000040
CRYPT_RECIPIENT = 0x00000010
CRYPT_INITIATOR = 0x00000040
CRYPT_ONLINE = 0x00000080
CRYPT_SF = 0x00000100
CRYPT_CREATE_IV = 0x00000200
CRYPT_KEK = 0x00000400
CRYPT_DATA_KEY = 0x00000800
CRYPT_VOLATILE = 0x00001000
CRYPT_SGCKEY = 0x00002000
CRYPT_ARCHIVABLE = 0x00004000
RSA1024BIT_KEY = 0x04000000
CRYPT_SERVER = 0x00000400
KEY_LENGTH_MASK = (-65536)
CRYPT_Y_ONLY = 0x00000001
CRYPT_SSL2_FALLBACK = 0x00000002
CRYPT_DESTROYKEY = 0x00000004
CRYPT_OAEP = 0x00000040
CRYPT_BLOB_VER3 = 0x00000080
CRYPT_IPSEC_HMAC_KEY = 0x00000100
CRYPT_DECRYPT_RSA_NO_PADDING_CHECK = 0x00000020
CRYPT_SECRETDIGEST = 0x00000001
CRYPT_OWF_REPL_LM_HASH = 0x00000001
CRYPT_LITTLE_ENDIAN = 0x00000001
CRYPT_NOHASHOID = 0x00000001
CRYPT_TYPE2_FORMAT = 0x00000002
CRYPT_X931_FORMAT = 0x00000004
CRYPT_MACHINE_DEFAULT = 0x00000001
CRYPT_USER_DEFAULT = 0x00000002
CRYPT_DELETE_DEFAULT = 0x00000004
SIMPLEBLOB = 0x1
PUBLICKEYBLOB = 0x6
PRIVATEKEYBLOB = 0x7
PLAINTEXTKEYBLOB = 0x8
OPAQUEKEYBLOB = 0x9
PUBLICKEYBLOBEX = 0xA
SYMMETRICWRAPKEYBLOB = 0xB
AT_KEYEXCHANGE = 1
AT_SIGNATURE = 2
CRYPT_USERDATA = 1
KP_IV = 1
KP_SALT = 2
KP_PADDING = 3
KP_MODE = 4
KP_MODE_BITS = 5
KP_PERMISSIONS = 6
KP_ALGID = 7
KP_BLOCKLEN = 8
KP_KEYLEN = 9
KP_SALT_EX = 10
KP_P = 11
KP_G = 12
KP_Q = 13
KP_X = 14
KP_Y = 15
KP_RA = 16
KP_RB = 17
KP_INFO = 18
KP_EFFECTIVE_KEYLEN = 19
KP_SCHANNEL_ALG = 20
KP_CLIENT_RANDOM = 21
KP_SERVER_RANDOM = 22
KP_RP = 23
KP_PRECOMP_MD5 = 24
KP_PRECOMP_SHA = 25
KP_CERTIFICATE = 26
KP_CLEAR_KEY = 27
KP_PUB_EX_LEN = 28
KP_PUB_EX_VAL = 29
KP_KEYVAL = 30
KP_ADMIN_PIN = 31
KP_KEYEXCHANGE_PIN = 32
KP_SIGNATURE_PIN = 33
KP_PREHASH = 34
KP_ROUNDS = 35
KP_OAEP_PARAMS = 36
KP_CMS_KEY_INFO = 37
KP_CMS_DH_KEY_INFO = 38
KP_PUB_PARAMS = 39
KP_VERIFY_PARAMS = 40
KP_HIGHEST_VERSION = 41
KP_GET_USE_COUNT = 42
PKCS5_PADDING = 1
RANDOM_PADDING = 2
ZERO_PADDING = 3
CRYPT_MODE_CBC = 1
CRYPT_MODE_ECB = 2
CRYPT_MODE_OFB = 3
CRYPT_MODE_CFB = 4
CRYPT_MODE_CTS = 5
CRYPT_ENCRYPT = 0x0001
CRYPT_DECRYPT = 0x0002
CRYPT_EXPORT = 0x0004
CRYPT_READ = 0x0008
CRYPT_WRITE = 0x0010
CRYPT_MAC = 0x0020
CRYPT_EXPORT_KEY = 0x0040
CRYPT_IMPORT_KEY = 0x0080
CRYPT_ARCHIVE = 0x0100
HP_ALGID = 0x0001
HP_HASHVAL = 0x0002
HP_HASHSIZE = 0x0004
HP_HMAC_INFO = 0x0005
HP_TLS1PRF_LABEL = 0x0006
HP_TLS1PRF_SEED = 0x0007
CRYPT_FAILED = 0
CRYPT_SUCCEED = 1
def RCRYPT_SUCCEEDED(rt): return ((rt) == CRYPT_SUCCEED)
def RCRYPT_FAILED(rt): return ((rt) == CRYPT_FAILED)
PP_ENUMALGS = 1
PP_ENUMCONTAINERS = 2
PP_IMPTYPE = 3
PP_NAME = 4
PP_VERSION = 5
PP_CONTAINER = 6
PP_CHANGE_PASSWORD = 7
PP_KEYSET_SEC_DESCR = 8
PP_CERTCHAIN = 9
PP_KEY_TYPE_SUBTYPE = 10
PP_PROVTYPE = 16
PP_KEYSTORAGE = 17
PP_APPLI_CERT = 18
PP_SYM_KEYSIZE = 19
PP_SESSION_KEYSIZE = 20
PP_UI_PROMPT = 21
PP_ENUMALGS_EX = 22
PP_ENUMMANDROOTS = 25
PP_ENUMELECTROOTS = 26
PP_KEYSET_TYPE = 27
PP_ADMIN_PIN = 31
PP_KEYEXCHANGE_PIN = 32
PP_SIGNATURE_PIN = 33
PP_SIG_KEYSIZE_INC = 34
PP_KEYX_KEYSIZE_INC = 35
PP_UNIQUE_CONTAINER = 36
PP_SGC_INFO = 37
PP_USE_HARDWARE_RNG = 38
PP_KEYSPEC = 39
PP_ENUMEX_SIGNING_PROT = 40
PP_CRYPT_COUNT_KEY_USE = 41
CRYPT_FIRST = 1
CRYPT_NEXT = 2
CRYPT_SGC_ENUM = 4
CRYPT_IMPL_HARDWARE = 1
CRYPT_IMPL_SOFTWARE = 2
CRYPT_IMPL_MIXED = 3
CRYPT_IMPL_UNKNOWN = 4
CRYPT_IMPL_REMOVABLE = 8
CRYPT_SEC_DESCR = 0x00000001
CRYPT_PSTORE = 0x00000002
CRYPT_UI_PROMPT = 0x00000004
CRYPT_FLAG_PCT1 = 0x0001
CRYPT_FLAG_SSL2 = 0x0002
CRYPT_FLAG_SSL3 = 0x0004
CRYPT_FLAG_TLS1 = 0x0008
CRYPT_FLAG_IPSEC = 0x0010
CRYPT_FLAG_SIGNING = 0x0020
CRYPT_SGC = 0x0001
CRYPT_FASTSGC = 0x0002
PP_CLIENT_HWND = 1
PP_CONTEXT_INFO = 11
PP_KEYEXCHANGE_KEYSIZE = 12
PP_SIGNATURE_KEYSIZE = 13
PP_KEYEXCHANGE_ALG = 14
PP_SIGNATURE_ALG = 15
PP_DELETEKEY = 24
PROV_RSA_FULL = 1
PROV_RSA_SIG = 2
PROV_DSS = 3
PROV_FORTEZZA = 4
PROV_MS_EXCHANGE = 5
PROV_SSL = 6
PROV_RSA_SCHANNEL = 12
PROV_DSS_DH = 13
PROV_EC_ECDSA_SIG = 14
PROV_EC_ECNRA_SIG = 15
PROV_EC_ECDSA_FULL = 16
PROV_EC_ECNRA_FULL = 17
PROV_DH_SCHANNEL = 18
PROV_SPYRUS_LYNKS = 20
PROV_RNG = 21
PROV_INTEL_SEC = 22
PROV_REPLACE_OWF = 23
PROV_RSA_AES = 24
MS_DEF_PROV_A = "Microsoft Base Cryptographic Provider v1.0"
MS_DEF_PROV = MS_DEF_PROV_A
MS_ENHANCED_PROV_A = "Microsoft Enhanced Cryptographic Provider v1.0"
MS_ENHANCED_PROV = MS_ENHANCED_PROV_A
MS_STRONG_PROV_A = "Microsoft Strong Cryptographic Provider"
MS_STRONG_PROV = MS_STRONG_PROV_A
MS_DEF_RSA_SIG_PROV_A = "Microsoft RSA Signature Cryptographic Provider"
MS_DEF_RSA_SIG_PROV = MS_DEF_RSA_SIG_PROV_A
MS_DEF_RSA_SCHANNEL_PROV_A = "Microsoft RSA SChannel Cryptographic Provider"
MS_DEF_RSA_SCHANNEL_PROV = MS_DEF_RSA_SCHANNEL_PROV_A
MS_DEF_DSS_PROV_A = "Microsoft Base DSS Cryptographic Provider"
MS_DEF_DSS_PROV = MS_DEF_DSS_PROV_A
MS_DEF_DSS_DH_PROV_A = "Microsoft Base DSS and Diffie-Hellman Cryptographic Provider"
MS_DEF_DSS_DH_PROV = MS_DEF_DSS_DH_PROV_A
MS_ENH_DSS_DH_PROV_A = "Microsoft Enhanced DSS and Diffie-Hellman Cryptographic Provider"
MS_ENH_DSS_DH_PROV = MS_ENH_DSS_DH_PROV_A
MS_DEF_DH_SCHANNEL_PROV_A = "Microsoft DH SChannel Cryptographic Provider"
MS_DEF_DH_SCHANNEL_PROV = MS_DEF_DH_SCHANNEL_PROV_A
MS_SCARD_PROV_A = "Microsoft Base Smart Card Crypto Provider"
MS_SCARD_PROV = MS_SCARD_PROV_A
MS_ENH_RSA_AES_PROV_A = "Microsoft Enhanced RSA and AES Cryptographic Provider"
MS_ENH_RSA_AES_PROV = MS_ENH_RSA_AES_PROV_A
MAXUIDLEN = 64
EXPO_OFFLOAD_REG_VALUE = "ExpoOffload"
EXPO_OFFLOAD_FUNC_NAME = "OffloadModExpo"
szKEY_CRYPTOAPI_PRIVATE_KEY_OPTIONS = \
"Software\\Policies\\Microsoft\\Cryptography"
szFORCE_KEY_PROTECTION = "ForceKeyProtection"
dwFORCE_KEY_PROTECTION_DISABLED = 0x0
dwFORCE_KEY_PROTECTION_USER_SELECT = 0x1
dwFORCE_KEY_PROTECTION_HIGH = 0x2
szKEY_CACHE_ENABLED = "CachePrivateKeys"
szKEY_CACHE_SECONDS = "PrivateKeyLifetimeSeconds"
CUR_BLOB_VERSION = 2
SCHANNEL_MAC_KEY = 0x00000000
SCHANNEL_ENC_KEY = 0x00000001
INTERNATIONAL_USAGE = 0x00000001
szOID_RSA = "1.2.840.113549"
szOID_PKCS = "1.2.840.113549.1"
szOID_RSA_HASH = "1.2.840.113549.2"
szOID_RSA_ENCRYPT = "1.2.840.113549.3"
szOID_PKCS_1 = "1.2.840.113549.1.1"
szOID_PKCS_2 = "1.2.840.113549.1.2"
szOID_PKCS_3 = "1.2.840.113549.1.3"
szOID_PKCS_4 = "1.2.840.113549.1.4"
szOID_PKCS_5 = "1.2.840.113549.1.5"
szOID_PKCS_6 = "1.2.840.113549.1.6"
szOID_PKCS_7 = "1.2.840.113549.1.7"
szOID_PKCS_8 = "1.2.840.113549.1.8"
szOID_PKCS_9 = "1.2.840.113549.1.9"
szOID_PKCS_10 = "1.2.840.113549.1.10"
szOID_PKCS_12 = "1.2.840.113549.1.12"
szOID_RSA_RSA = "1.2.840.113549.1.1.1"
szOID_RSA_MD2RSA = "1.2.840.113549.1.1.2"
szOID_RSA_MD4RSA = "1.2.840.113549.1.1.3"
szOID_RSA_MD5RSA = "1.2.840.113549.1.1.4"
szOID_RSA_SHA1RSA = "1.2.840.113549.1.1.5"
szOID_RSA_SETOAEP_RSA = "1.2.840.113549.1.1.6"
szOID_RSA_DH = "1.2.840.113549.1.3.1"
szOID_RSA_data = "1.2.840.113549.1.7.1"
szOID_RSA_signedData = "1.2.840.113549.1.7.2"
szOID_RSA_envelopedData = "1.2.840.113549.1.7.3"
szOID_RSA_signEnvData = "1.2.840.113549.1.7.4"
szOID_RSA_digestedData = "1.2.840.113549.1.7.5"
szOID_RSA_hashedData = "1.2.840.113549.1.7.5"
szOID_RSA_encryptedData = "1.2.840.113549.1.7.6"
szOID_RSA_emailAddr = "1.2.840.113549.1.9.1"
szOID_RSA_unstructName = "1.2.840.113549.1.9.2"
szOID_RSA_contentType = "1.2.840.113549.1.9.3"
szOID_RSA_messageDigest = "1.2.840.113549.1.9.4"
szOID_RSA_signingTime = "1.2.840.113549.1.9.5"
szOID_RSA_counterSign = "1.2.840.113549.1.9.6"
szOID_RSA_challengePwd = "1.2.840.113549.1.9.7"
szOID_RSA_unstructAddr = "1.2.840.113549.1.9.8"
szOID_RSA_extCertAttrs = "1.2.840.113549.1.9.9"
szOID_RSA_certExtensions = "1.2.840.113549.1.9.14"
szOID_RSA_SMIMECapabilities = "1.2.840.113549.1.9.15"
szOID_RSA_preferSignedData = "1.2.840.113549.1.9.15.1"
szOID_RSA_SMIMEalg = "1.2.840.113549.1.9.16.3"
szOID_RSA_SMIMEalgESDH = "1.2.840.113549.1.9.16.3.5"
szOID_RSA_SMIMEalgCMS3DESwrap = "1.2.840.113549.1.9.16.3.6"
szOID_RSA_SMIMEalgCMSRC2wrap = "1.2.840.113549.1.9.16.3.7"
szOID_RSA_MD2 = "1.2.840.113549.2.2"
szOID_RSA_MD4 = "1.2.840.113549.2.4"
szOID_RSA_MD5 = "1.2.840.113549.2.5"
szOID_RSA_RC2CBC = "1.2.840.113549.3.2"
szOID_RSA_RC4 = "1.2.840.113549.3.4"
szOID_RSA_DES_EDE3_CBC = "1.2.840.113549.3.7"
szOID_RSA_RC5_CBCPad = "1.2.840.113549.3.9"
szOID_ANSI_X942 = "1.2.840.10046"
szOID_ANSI_X942_DH = "1.2.840.10046.2.1"
szOID_X957 = "1.2.840.10040"
szOID_X957_DSA = "1.2.840.10040.4.1"
szOID_X957_SHA1DSA = "1.2.840.10040.4.3"
szOID_DS = "2.5"
szOID_DSALG = "2.5.8"
szOID_DSALG_CRPT = "2.5.8.1"
szOID_DSALG_HASH = "2.5.8.2"
szOID_DSALG_SIGN = "2.5.8.3"
szOID_DSALG_RSA = "2.5.8.1.1"
szOID_OIW = "1.3.14"
szOID_OIWSEC = "1.3.14.3.2"
szOID_OIWSEC_md4RSA = "1.3.14.3.2.2"
szOID_OIWSEC_md5RSA = "1.3.14.3.2.3"
szOID_OIWSEC_md4RSA2 = "1.3.14.3.2.4"
szOID_OIWSEC_desECB = "1.3.14.3.2.6"
szOID_OIWSEC_desCBC = "1.3.14.3.2.7"
szOID_OIWSEC_desOFB = "1.3.14.3.2.8"
szOID_OIWSEC_desCFB = "1.3.14.3.2.9"
szOID_OIWSEC_desMAC = "1.3.14.3.2.10"
szOID_OIWSEC_rsaSign = "1.3.14.3.2.11"
szOID_OIWSEC_dsa = "1.3.14.3.2.12"
szOID_OIWSEC_shaDSA = "1.3.14.3.2.13"
szOID_OIWSEC_mdc2RSA = "1.3.14.3.2.14"
szOID_OIWSEC_shaRSA = "1.3.14.3.2.15"
szOID_OIWSEC_dhCommMod = "1.3.14.3.2.16"
szOID_OIWSEC_desEDE = "1.3.14.3.2.17"
szOID_OIWSEC_sha = "1.3.14.3.2.18"
szOID_OIWSEC_mdc2 = "1.3.14.3.2.19"
szOID_OIWSEC_dsaComm = "1.3.14.3.2.20"
szOID_OIWSEC_dsaCommSHA = "1.3.14.3.2.21"
szOID_OIWSEC_rsaXchg = "1.3.14.3.2.22"
szOID_OIWSEC_keyHashSeal = "1.3.14.3.2.23"
szOID_OIWSEC_md2RSASign = "1.3.14.3.2.24"
szOID_OIWSEC_md5RSASign = "1.3.14.3.2.25"
szOID_OIWSEC_sha1 = "1.3.14.3.2.26"
szOID_OIWSEC_dsaSHA1 = "1.3.14.3.2.27"
szOID_OIWSEC_dsaCommSHA1 = "1.3.14.3.2.28"
szOID_OIWSEC_sha1RSASign = "1.3.14.3.2.29"
szOID_OIWDIR = "1.3.14.7.2"
szOID_OIWDIR_CRPT = "1.3.14.7.2.1"
szOID_OIWDIR_HASH = "1.3.14.7.2.2"
szOID_OIWDIR_SIGN = "1.3.14.7.2.3"
szOID_OIWDIR_md2 = "1.3.14.7.2.2.1"
szOID_OIWDIR_md2RSA = "1.3.14.7.2.3.1"
szOID_INFOSEC = "2.16.840.1.101.2.1"
szOID_INFOSEC_sdnsSignature = "2.16.840.1.101.2.1.1.1"
szOID_INFOSEC_mosaicSignature = "2.16.840.1.101.2.1.1.2"
szOID_INFOSEC_sdnsConfidentiality = "2.16.840.1.101.2.1.1.3"
szOID_INFOSEC_mosaicConfidentiality = "2.16.840.1.101.2.1.1.4"
szOID_INFOSEC_sdnsIntegrity = "2.16.840.1.101.2.1.1.5"
szOID_INFOSEC_mosaicIntegrity = "2.16.840.1.101.2.1.1.6"
szOID_INFOSEC_sdnsTokenProtection = "2.16.840.1.101.2.1.1.7"
szOID_INFOSEC_mosaicTokenProtection = "2.16.840.1.101.2.1.1.8"
szOID_INFOSEC_sdnsKeyManagement = "2.16.840.1.101.2.1.1.9"
szOID_INFOSEC_mosaicKeyManagement = "2.16.840.1.101.2.1.1.10"
szOID_INFOSEC_sdnsKMandSig = "2.16.840.1.101.2.1.1.11"
szOID_INFOSEC_mosaicKMandSig = "2.16.840.1.101.2.1.1.12"
szOID_INFOSEC_SuiteASignature = "2.16.840.1.101.2.1.1.13"
szOID_INFOSEC_SuiteAConfidentiality = "2.16.840.1.101.2.1.1.14"
szOID_INFOSEC_SuiteAIntegrity = "2.16.840.1.101.2.1.1.15"
szOID_INFOSEC_SuiteATokenProtection = "2.16.840.1.101.2.1.1.16"
szOID_INFOSEC_SuiteAKeyManagement = "2.16.840.1.101.2.1.1.17"
szOID_INFOSEC_SuiteAKMandSig = "2.16.840.1.101.2.1.1.18"
szOID_INFOSEC_mosaicUpdatedSig = "2.16.840.1.101.2.1.1.19"
szOID_INFOSEC_mosaicKMandUpdSig = "2.16.840.1.101.2.1.1.20"
szOID_INFOSEC_mosaicUpdatedInteg = "2.16.840.1.101.2.1.1.21"
szOID_COMMON_NAME = "2.5.4.3"
szOID_SUR_NAME = "2.5.4.4"
szOID_DEVICE_SERIAL_NUMBER = "2.5.4.5"
szOID_COUNTRY_NAME = "2.5.4.6"
szOID_LOCALITY_NAME = "2.5.4.7"
szOID_STATE_OR_PROVINCE_NAME = "2.5.4.8"
szOID_STREET_ADDRESS = "2.5.4.9"
szOID_ORGANIZATION_NAME = "2.5.4.10"
szOID_ORGANIZATIONAL_UNIT_NAME = "2.5.4.11"
szOID_TITLE = "2.5.4.12"
szOID_DESCRIPTION = "2.5.4.13"
szOID_SEARCH_GUIDE = "2.5.4.14"
szOID_BUSINESS_CATEGORY = "2.5.4.15"
szOID_POSTAL_ADDRESS = "2.5.4.16"
szOID_POSTAL_CODE = "2.5.4.17"
szOID_POST_OFFICE_BOX = "2.5.4.18"
szOID_PHYSICAL_DELIVERY_OFFICE_NAME = "2.5.4.19"
szOID_TELEPHONE_NUMBER = "2.5.4.20"
szOID_TELEX_NUMBER = "2.5.4.21"
szOID_TELETEXT_TERMINAL_IDENTIFIER = "2.5.4.22"
szOID_FACSIMILE_TELEPHONE_NUMBER = "2.5.4.23"
szOID_X21_ADDRESS = "2.5.4.24"
szOID_INTERNATIONAL_ISDN_NUMBER = "2.5.4.25"
szOID_REGISTERED_ADDRESS = "2.5.4.26"
szOID_DESTINATION_INDICATOR = "2.5.4.27"
szOID_PREFERRED_DELIVERY_METHOD = "2.5.4.28"
szOID_PRESENTATION_ADDRESS = "2.5.4.29"
szOID_SUPPORTED_APPLICATION_CONTEXT = "2.5.4.30"
szOID_MEMBER = "2.5.4.31"
szOID_OWNER = "2.5.4.32"
szOID_ROLE_OCCUPANT = "2.5.4.33"
szOID_SEE_ALSO = "2.5.4.34"
szOID_USER_PASSWORD = "2.5.4.35"
szOID_USER_CERTIFICATE = "2.5.4.36"
szOID_CA_CERTIFICATE = "2.5.4.37"
szOID_AUTHORITY_REVOCATION_LIST = "2.5.4.38"
szOID_CERTIFICATE_REVOCATION_LIST = "2.5.4.39"
szOID_CROSS_CERTIFICATE_PAIR = "2.5.4.40"
szOID_GIVEN_NAME = "2.5.4.42"
szOID_INITIALS = "2.5.4.43"
szOID_DN_QUALIFIER = "2.5.4.46"
szOID_DOMAIN_COMPONENT = "0.9.2342.19200300.100.1.25"
szOID_PKCS_12_FRIENDLY_NAME_ATTR = "1.2.840.113549.1.9.20"
szOID_PKCS_12_LOCAL_KEY_ID = "1.2.840.113549.1.9.21"
szOID_PKCS_12_KEY_PROVIDER_NAME_ATTR = "1.3.6.1.4.1.311.17.1"
szOID_LOCAL_MACHINE_KEYSET = "1.3.6.1.4.1.311.17.2"
szOID_KEYID_RDN = "1.3.6.1.4.1.311.10.7.1"
CERT_RDN_ANY_TYPE = 0
CERT_RDN_ENCODED_BLOB = 1
CERT_RDN_OCTET_STRING = 2
CERT_RDN_NUMERIC_STRING = 3
CERT_RDN_PRINTABLE_STRING = 4
CERT_RDN_TELETEX_STRING = 5
CERT_RDN_T61_STRING = 5
CERT_RDN_VIDEOTEX_STRING = 6
CERT_RDN_IA5_STRING = 7
CERT_RDN_GRAPHIC_STRING = 8
CERT_RDN_VISIBLE_STRING = 9
CERT_RDN_ISO646_STRING = 9
CERT_RDN_GENERAL_STRING = 10
CERT_RDN_UNIVERSAL_STRING = 11
CERT_RDN_INT4_STRING = 11
CERT_RDN_BMP_STRING = 12
CERT_RDN_UNICODE_STRING = 12
CERT_RDN_UTF8_STRING = 13
CERT_RDN_TYPE_MASK = 0x000000FF
CERT_RDN_FLAGS_MASK = (-16777216)
CERT_RDN_ENABLE_T61_UNICODE_FLAG = (-2147483648)
CERT_RDN_ENABLE_UTF8_UNICODE_FLAG = 0x20000000
CERT_RDN_DISABLE_CHECK_TYPE_FLAG = 0x40000000
CERT_RDN_DISABLE_IE4_UTF8_FLAG = 0x01000000
CERT_RSA_PUBLIC_KEY_OBJID = szOID_RSA_RSA
CERT_DEFAULT_OID_PUBLIC_KEY_SIGN = szOID_RSA_RSA
CERT_DEFAULT_OID_PUBLIC_KEY_XCHG = szOID_RSA_RSA
CERT_V1 = 0
CERT_V2 = 1
CERT_V3 = 2
CERT_INFO_VERSION_FLAG = 1
CERT_INFO_SERIAL_NUMBER_FLAG = 2
CERT_INFO_SIGNATURE_ALGORITHM_FLAG = 3
CERT_INFO_ISSUER_FLAG = 4
CERT_INFO_NOT_BEFORE_FLAG = 5
CERT_INFO_NOT_AFTER_FLAG = 6
CERT_INFO_SUBJECT_FLAG = 7
CERT_INFO_SUBJECT_PUBLIC_KEY_INFO_FLAG = 8
CERT_INFO_ISSUER_UNIQUE_ID_FLAG = 9
CERT_INFO_SUBJECT_UNIQUE_ID_FLAG = 10
CERT_INFO_EXTENSION_FLAG = 11
CRL_V1 = 0
CRL_V2 = 1
CERT_REQUEST_V1 = 0
CERT_KEYGEN_REQUEST_V1 = 0
CTL_V1 = 0
CERT_ENCODING_TYPE_MASK = 0x0000FFFF
CMSG_ENCODING_TYPE_MASK = (-65536)
def GET_CERT_ENCODING_TYPE(X): return (X & CERT_ENCODING_TYPE_MASK)
def GET_CMSG_ENCODING_TYPE(X): return (X & CMSG_ENCODING_TYPE_MASK)
CRYPT_ASN_ENCODING = 0x00000001
CRYPT_NDR_ENCODING = 0x00000002
X509_ASN_ENCODING = 0x00000001
X509_NDR_ENCODING = 0x00000002
PKCS_7_ASN_ENCODING = 0x00010000
PKCS_7_NDR_ENCODING = 0x00020000
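# Illustrative example (not part of the generated header): CryptoAPI encoding
# type parameters commonly OR a certificate encoding with a message encoding;
# the GET_*_ENCODING_TYPE helpers above recover each half of the combined value:
#   encoding = X509_ASN_ENCODING | PKCS_7_ASN_ENCODING       # 0x00010001
#   GET_CERT_ENCODING_TYPE(encoding) == X509_ASN_ENCODING    # low word
#   GET_CMSG_ENCODING_TYPE(encoding) == PKCS_7_ASN_ENCODING  # high word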
CRYPT_FORMAT_STR_MULTI_LINE = 0x0001
CRYPT_FORMAT_STR_NO_HEX = 0x0010
CRYPT_FORMAT_SIMPLE = 0x0001
CRYPT_FORMAT_X509 = 0x0002
CRYPT_FORMAT_OID = 0x0004
CRYPT_FORMAT_RDN_SEMICOLON = 0x0100
CRYPT_FORMAT_RDN_CRLF = 0x0200
CRYPT_FORMAT_RDN_UNQUOTE = 0x0400
CRYPT_FORMAT_RDN_REVERSE = 0x0800
CRYPT_FORMAT_COMMA = 0x1000
CRYPT_FORMAT_SEMICOLON = CRYPT_FORMAT_RDN_SEMICOLON
CRYPT_FORMAT_CRLF = CRYPT_FORMAT_RDN_CRLF
CRYPT_ENCODE_NO_SIGNATURE_BYTE_REVERSAL_FLAG = 0x8
CRYPT_ENCODE_ALLOC_FLAG = 0x8000
CRYPT_UNICODE_NAME_ENCODE_ENABLE_T61_UNICODE_FLAG = \
CERT_RDN_ENABLE_T61_UNICODE_FLAG
CRYPT_UNICODE_NAME_ENCODE_ENABLE_UTF8_UNICODE_FLAG = \
CERT_RDN_ENABLE_UTF8_UNICODE_FLAG
CRYPT_UNICODE_NAME_ENCODE_DISABLE_CHECK_TYPE_FLAG = \
CERT_RDN_DISABLE_CHECK_TYPE_FLAG
CRYPT_SORTED_CTL_ENCODE_HASHED_SUBJECT_IDENTIFIER_FLAG = 0x10000
CRYPT_DECODE_NOCOPY_FLAG = 0x1
CRYPT_DECODE_TO_BE_SIGNED_FLAG = 0x2
CRYPT_DECODE_SHARE_OID_STRING_FLAG = 0x4
CRYPT_DECODE_NO_SIGNATURE_BYTE_REVERSAL_FLAG = 0x8
CRYPT_DECODE_ALLOC_FLAG = 0x8000
CRYPT_UNICODE_NAME_DECODE_DISABLE_IE4_UTF8_FLAG = \
CERT_RDN_DISABLE_IE4_UTF8_FLAG
CRYPT_ENCODE_DECODE_NONE = 0
X509_CERT = 1
X509_CERT_TO_BE_SIGNED = 2
X509_CERT_CRL_TO_BE_SIGNED = 3
X509_CERT_REQUEST_TO_BE_SIGNED = 4
X509_EXTENSIONS = 5
X509_NAME_VALUE = 6
X509_NAME = 7
X509_PUBLIC_KEY_INFO = 8
X509_AUTHORITY_KEY_ID = 9
X509_KEY_ATTRIBUTES = 10
X509_KEY_USAGE_RESTRICTION = 11
X509_ALTERNATE_NAME = 12
X509_BASIC_CONSTRAINTS = 13
X509_KEY_USAGE = 14
X509_BASIC_CONSTRAINTS2 = 15
X509_CERT_POLICIES = 16
PKCS_UTC_TIME = 17
PKCS_TIME_REQUEST = 18
RSA_CSP_PUBLICKEYBLOB = 19
X509_UNICODE_NAME = 20
X509_KEYGEN_REQUEST_TO_BE_SIGNED = 21
PKCS_ATTRIBUTE = 22
PKCS_CONTENT_INFO_SEQUENCE_OF_ANY = 23
X509_UNICODE_NAME_VALUE = 24
X509_ANY_STRING = X509_NAME_VALUE
X509_UNICODE_ANY_STRING = X509_UNICODE_NAME_VALUE
X509_OCTET_STRING = 25
X509_BITS = 26
X509_INTEGER = 27
X509_MULTI_BYTE_INTEGER = 28
X509_ENUMERATED = 29
X509_CHOICE_OF_TIME = 30
X509_AUTHORITY_KEY_ID2 = 31
X509_AUTHORITY_INFO_ACCESS = 32
X509_SUBJECT_INFO_ACCESS = X509_AUTHORITY_INFO_ACCESS
X509_CRL_REASON_CODE = X509_ENUMERATED
PKCS_CONTENT_INFO = 33
X509_SEQUENCE_OF_ANY = 34
X509_CRL_DIST_POINTS = 35
X509_ENHANCED_KEY_USAGE = 36
PKCS_CTL = 37
X509_MULTI_BYTE_UINT = 38
X509_DSS_PUBLICKEY = X509_MULTI_BYTE_UINT
X509_DSS_PARAMETERS = 39
X509_DSS_SIGNATURE = 40
PKCS_RC2_CBC_PARAMETERS = 41
PKCS_SMIME_CAPABILITIES = 42
X509_QC_STATEMENTS_EXT = 42
PKCS_RSA_PRIVATE_KEY = 43
PKCS_PRIVATE_KEY_INFO = 44
PKCS_ENCRYPTED_PRIVATE_KEY_INFO = 45
X509_PKIX_POLICY_QUALIFIER_USERNOTICE = 46
X509_DH_PUBLICKEY = X509_MULTI_BYTE_UINT
X509_DH_PARAMETERS = 47
PKCS_ATTRIBUTES = 48
PKCS_SORTED_CTL = 49
X509_ECC_SIGNATURE = 47
X942_DH_PARAMETERS = 50
X509_BITS_WITHOUT_TRAILING_ZEROES = 51
X942_OTHER_INFO = 52
X509_CERT_PAIR = 53
X509_ISSUING_DIST_POINT = 54
X509_NAME_CONSTRAINTS = 55
X509_POLICY_MAPPINGS = 56
X509_POLICY_CONSTRAINTS = 57
X509_CROSS_CERT_DIST_POINTS = 58
CMC_DATA = 59
CMC_RESPONSE = 60
CMC_STATUS = 61
CMC_ADD_EXTENSIONS = 62
CMC_ADD_ATTRIBUTES = 63
X509_CERTIFICATE_TEMPLATE = 64
OCSP_SIGNED_REQUEST = 65
OCSP_REQUEST = 66
OCSP_RESPONSE = 67
OCSP_BASIC_SIGNED_RESPONSE = 68
OCSP_BASIC_RESPONSE = 69
X509_LOGOTYPE_EXT = 70
X509_BIOMETRIC_EXT = 71
CNG_RSA_PUBLIC_KEY_BLOB = 72
X509_OBJECT_IDENTIFIER = 73
X509_ALGORITHM_IDENTIFIER = 74
PKCS_RSA_SSA_PSS_PARAMETERS = 75
PKCS_RSAES_OAEP_PARAMETERS = 76
ECC_CMS_SHARED_INFO = 77
TIMESTAMP_REQUEST = 78
TIMESTAMP_RESPONSE = 79
TIMESTAMP_INFO = 80
X509_CERT_BUNDLE = 81
PKCS7_SIGNER_INFO = 500
CMS_SIGNER_INFO = 501
szOID_AUTHORITY_KEY_IDENTIFIER = "2.5.29.1"
szOID_KEY_ATTRIBUTES = "2.5.29.2"
szOID_CERT_POLICIES_95 = "2.5.29.3"
szOID_KEY_USAGE_RESTRICTION = "2.5.29.4"
szOID_SUBJECT_ALT_NAME = "2.5.29.7"
szOID_ISSUER_ALT_NAME = "2.5.29.8"
szOID_BASIC_CONSTRAINTS = "2.5.29.10"
szOID_KEY_USAGE = "2.5.29.15"
szOID_PRIVATEKEY_USAGE_PERIOD = "2.5.29.16"
szOID_BASIC_CONSTRAINTS2 = "2.5.29.19"
szOID_CERT_POLICIES = "2.5.29.32"
szOID_ANY_CERT_POLICY = "2.5.29.32.0"
szOID_AUTHORITY_KEY_IDENTIFIER2 = "2.5.29.35"
szOID_SUBJECT_KEY_IDENTIFIER = "2.5.29.14"
szOID_SUBJECT_ALT_NAME2 = "2.5.29.17"
szOID_ISSUER_ALT_NAME2 = "2.5.29.18"
szOID_CRL_REASON_CODE = "2.5.29.21"
szOID_REASON_CODE_HOLD = "2.5.29.23"
szOID_CRL_DIST_POINTS = "2.5.29.31"
szOID_ENHANCED_KEY_USAGE = "2.5.29.37"
szOID_CRL_NUMBER = "2.5.29.20"
szOID_DELTA_CRL_INDICATOR = "2.5.29.27"
szOID_ISSUING_DIST_POINT = "2.5.29.28"
szOID_FRESHEST_CRL = "2.5.29.46"
szOID_NAME_CONSTRAINTS = "2.5.29.30"
szOID_POLICY_MAPPINGS = "2.5.29.33"
szOID_LEGACY_POLICY_MAPPINGS = "2.5.29.5"
szOID_POLICY_CONSTRAINTS = "2.5.29.36"
szOID_RENEWAL_CERTIFICATE = "1.3.6.1.4.1.311.13.1"
szOID_ENROLLMENT_NAME_VALUE_PAIR = "1.3.6.1.4.1.311.13.2.1"
szOID_ENROLLMENT_CSP_PROVIDER = "1.3.6.1.4.1.311.13.2.2"
szOID_OS_VERSION = "1.3.6.1.4.1.311.13.2.3"
szOID_ENROLLMENT_AGENT = "1.3.6.1.4.1.311.20.2.1"
szOID_PKIX = "1.3.6.1.5.5.7"
szOID_PKIX_PE = "1.3.6.1.5.5.7.1"
szOID_AUTHORITY_INFO_ACCESS = "1.3.6.1.5.5.7.1.1"
szOID_CERT_EXTENSIONS = "1.3.6.1.4.1.311.2.1.14"
szOID_NEXT_UPDATE_LOCATION = "1.3.6.1.4.1.311.10.2"
szOID_REMOVE_CERTIFICATE = "1.3.6.1.4.1.311.10.8.1"
szOID_CROSS_CERT_DIST_POINTS = "1.3.6.1.4.1.311.10.9.1"
szOID_CTL = "1.3.6.1.4.1.311.10.1"
szOID_SORTED_CTL = "1.3.6.1.4.1.311.10.1.1"
szOID_SERIALIZED = "1.3.6.1.4.1.311.10.3.3.1"
szOID_NT_PRINCIPAL_NAME = "1.3.6.1.4.1.311.20.2.3"
szOID_PRODUCT_UPDATE = "1.3.6.1.4.1.311.31.1"
szOID_ANY_APPLICATION_POLICY = "1.3.6.1.4.1.311.10.12.1"
szOID_AUTO_ENROLL_CTL_USAGE = "1.3.6.1.4.1.311.20.1"
szOID_ENROLL_CERTTYPE_EXTENSION = "1.3.6.1.4.1.311.20.2"
szOID_CERT_MANIFOLD = "1.3.6.1.4.1.311.20.3"
szOID_CERTSRV_CA_VERSION = "1.3.6.1.4.1.311.21.1"
szOID_CERTSRV_PREVIOUS_CERT_HASH = "1.3.6.1.4.1.311.21.2"
szOID_CRL_VIRTUAL_BASE = "1.3.6.1.4.1.311.21.3"
szOID_CRL_NEXT_PUBLISH = "1.3.6.1.4.1.311.21.4"
szOID_KP_CA_EXCHANGE = "1.3.6.1.4.1.311.21.5"
szOID_KP_KEY_RECOVERY_AGENT = "1.3.6.1.4.1.311.21.6"
szOID_CERTIFICATE_TEMPLATE = "1.3.6.1.4.1.311.21.7"
szOID_ENTERPRISE_OID_ROOT = "1.3.6.1.4.1.311.21.8"
szOID_RDN_DUMMY_SIGNER = "1.3.6.1.4.1.311.21.9"
szOID_APPLICATION_CERT_POLICIES = "1.3.6.1.4.1.311.21.10"
szOID_APPLICATION_POLICY_MAPPINGS = "1.3.6.1.4.1.311.21.11"
szOID_APPLICATION_POLICY_CONSTRAINTS = "1.3.6.1.4.1.311.21.12"
szOID_ARCHIVED_KEY_ATTR = "1.3.6.1.4.1.311.21.13"
szOID_CRL_SELF_CDP = "1.3.6.1.4.1.311.21.14"
szOID_REQUIRE_CERT_CHAIN_POLICY = "1.3.6.1.4.1.311.21.15"
szOID_ARCHIVED_KEY_CERT_HASH = "1.3.6.1.4.1.311.21.16"
szOID_ISSUED_CERT_HASH = "1.3.6.1.4.1.311.21.17"
szOID_DS_EMAIL_REPLICATION = "1.3.6.1.4.1.311.21.19"
szOID_REQUEST_CLIENT_INFO = "1.3.6.1.4.1.311.21.20"
szOID_ENCRYPTED_KEY_HASH = "1.3.6.1.4.1.311.21.21"
szOID_CERTSRV_CROSSCA_VERSION = "1.3.6.1.4.1.311.21.22"
szOID_NTDS_REPLICATION = "1.3.6.1.4.1.311.25.1"
szOID_SUBJECT_DIR_ATTRS = "2.5.29.9"
szOID_PKIX_KP = "1.3.6.1.5.5.7.3"
szOID_PKIX_KP_SERVER_AUTH = "1.3.6.1.5.5.7.3.1"
szOID_PKIX_KP_CLIENT_AUTH = "1.3.6.1.5.5.7.3.2"
szOID_PKIX_KP_CODE_SIGNING = "1.3.6.1.5.5.7.3.3"
szOID_PKIX_KP_EMAIL_PROTECTION = "1.3.6.1.5.5.7.3.4"
szOID_PKIX_KP_IPSEC_END_SYSTEM = "1.3.6.1.5.5.7.3.5"
szOID_PKIX_KP_IPSEC_TUNNEL = "1.3.6.1.5.5.7.3.6"
szOID_PKIX_KP_IPSEC_USER = "1.3.6.1.5.5.7.3.7"
szOID_PKIX_KP_TIMESTAMP_SIGNING = "1.3.6.1.5.5.7.3.8"
szOID_IPSEC_KP_IKE_INTERMEDIATE = "1.3.6.1.5.5.8.2.2"
szOID_KP_CTL_USAGE_SIGNING = "1.3.6.1.4.1.311.10.3.1"
szOID_KP_TIME_STAMP_SIGNING = "1.3.6.1.4.1.311.10.3.2"
szOID_SERVER_GATED_CRYPTO = "1.3.6.1.4.1.311.10.3.3"
szOID_SGC_NETSCAPE = "2.16.840.1.113730.4.1"
szOID_KP_EFS = "1.3.6.1.4.1.311.10.3.4"
szOID_EFS_RECOVERY = "1.3.6.1.4.1.311.10.3.4.1"
szOID_WHQL_CRYPTO = "1.3.6.1.4.1.311.10.3.5"
szOID_NT5_CRYPTO = "1.3.6.1.4.1.311.10.3.6"
szOID_OEM_WHQL_CRYPTO = "1.3.6.1.4.1.311.10.3.7"
szOID_EMBEDDED_NT_CRYPTO = "1.3.6.1.4.1.311.10.3.8"
szOID_ROOT_LIST_SIGNER = "1.3.6.1.4.1.311.10.3.9"
szOID_KP_QUALIFIED_SUBORDINATION = "1.3.6.1.4.1.311.10.3.10"
szOID_KP_KEY_RECOVERY = "1.3.6.1.4.1.311.10.3.11"
szOID_KP_DOCUMENT_SIGNING = "1.3.6.1.4.1.311.10.3.12"
szOID_KP_LIFETIME_SIGNING = "1.3.6.1.4.1.311.10.3.13"
szOID_KP_MOBILE_DEVICE_SOFTWARE = "1.3.6.1.4.1.311.10.3.14"
szOID_DRM = "1.3.6.1.4.1.311.10.5.1"
szOID_DRM_INDIVIDUALIZATION = "1.3.6.1.4.1.311.10.5.2"
szOID_LICENSES = "1.3.6.1.4.1.311.10.6.1"
szOID_LICENSE_SERVER = "1.3.6.1.4.1.311.10.6.2"
szOID_KP_SMARTCARD_LOGON = "1.3.6.1.4.1.311.20.2.2"
szOID_YESNO_TRUST_ATTR = "1.3.6.1.4.1.311.10.4.1"
szOID_PKIX_POLICY_QUALIFIER_CPS = "1.3.6.1.5.5.7.2.1"
szOID_PKIX_POLICY_QUALIFIER_USERNOTICE = "1.3.6.1.5.5.7.2.2"
szOID_CERT_POLICIES_95_QUALIFIER1 = "2.16.840.1.113733.1.7.1.1"
CERT_UNICODE_RDN_ERR_INDEX_MASK = 0x3FF
CERT_UNICODE_RDN_ERR_INDEX_SHIFT = 22
CERT_UNICODE_ATTR_ERR_INDEX_MASK = 0x003F
CERT_UNICODE_ATTR_ERR_INDEX_SHIFT = 16
CERT_UNICODE_VALUE_ERR_INDEX_MASK = 0x0000FFFF
CERT_UNICODE_VALUE_ERR_INDEX_SHIFT = 0
CERT_DIGITAL_SIGNATURE_KEY_USAGE = 0x80
CERT_NON_REPUDIATION_KEY_USAGE = 0x40
CERT_KEY_ENCIPHERMENT_KEY_USAGE = 0x20
CERT_DATA_ENCIPHERMENT_KEY_USAGE = 0x10
CERT_KEY_AGREEMENT_KEY_USAGE = 0x08
CERT_KEY_CERT_SIGN_KEY_USAGE = 0x04
CERT_OFFLINE_CRL_SIGN_KEY_USAGE = 0x02
CERT_CRL_SIGN_KEY_USAGE = 0x02
CERT_ENCIPHER_ONLY_KEY_USAGE = 0x01
CERT_DECIPHER_ONLY_KEY_USAGE = 0x80
CERT_ALT_NAME_OTHER_NAME = 1
CERT_ALT_NAME_RFC822_NAME = 2
CERT_ALT_NAME_DNS_NAME = 3
CERT_ALT_NAME_X400_ADDRESS = 4
CERT_ALT_NAME_DIRECTORY_NAME = 5
CERT_ALT_NAME_EDI_PARTY_NAME = 6
CERT_ALT_NAME_URL = 7
CERT_ALT_NAME_IP_ADDRESS = 8
CERT_ALT_NAME_REGISTERED_ID = 9
CERT_ALT_NAME_ENTRY_ERR_INDEX_MASK = 0xFF
CERT_ALT_NAME_ENTRY_ERR_INDEX_SHIFT = 16
CERT_ALT_NAME_VALUE_ERR_INDEX_MASK = 0x0000FFFF
CERT_ALT_NAME_VALUE_ERR_INDEX_SHIFT = 0
CERT_CA_SUBJECT_FLAG = 0x80
CERT_END_ENTITY_SUBJECT_FLAG = 0x40
szOID_PKIX_ACC_DESCR = "1.3.6.1.5.5.7.48"
szOID_PKIX_OCSP = "1.3.6.1.5.5.7.48.1"
szOID_PKIX_CA_ISSUERS = "1.3.6.1.5.5.7.48.2"
CRL_REASON_UNSPECIFIED = 0
CRL_REASON_KEY_COMPROMISE = 1
CRL_REASON_CA_COMPROMISE = 2
CRL_REASON_AFFILIATION_CHANGED = 3
CRL_REASON_SUPERSEDED = 4
CRL_REASON_CESSATION_OF_OPERATION = 5
CRL_REASON_CERTIFICATE_HOLD = 6
CRL_REASON_REMOVE_FROM_CRL = 8
CRL_DIST_POINT_NO_NAME = 0
CRL_DIST_POINT_FULL_NAME = 1
CRL_DIST_POINT_ISSUER_RDN_NAME = 2
CRL_REASON_UNUSED_FLAG = 0x80
CRL_REASON_KEY_COMPROMISE_FLAG = 0x40
CRL_REASON_CA_COMPROMISE_FLAG = 0x20
CRL_REASON_AFFILIATION_CHANGED_FLAG = 0x10
CRL_REASON_SUPERSEDED_FLAG = 0x08
CRL_REASON_CESSATION_OF_OPERATION_FLAG = 0x04
CRL_REASON_CERTIFICATE_HOLD_FLAG = 0x02
CRL_DIST_POINT_ERR_INDEX_MASK = 0x7F
CRL_DIST_POINT_ERR_INDEX_SHIFT = 24
CRL_DIST_POINT_ERR_CRL_ISSUER_BIT = (-2147483648)
CROSS_CERT_DIST_POINT_ERR_INDEX_MASK = 0xFF
CROSS_CERT_DIST_POINT_ERR_INDEX_SHIFT = 24
CERT_EXCLUDED_SUBTREE_BIT = (-2147483648)
SORTED_CTL_EXT_FLAGS_OFFSET = (0*4)
SORTED_CTL_EXT_COUNT_OFFSET = (1*4)
SORTED_CTL_EXT_MAX_COLLISION_OFFSET = (2*4)
SORTED_CTL_EXT_HASH_BUCKET_OFFSET = (3*4)
SORTED_CTL_EXT_HASHED_SUBJECT_IDENTIFIER_FLAG = 0x1
CERT_DSS_R_LEN = 20
CERT_DSS_S_LEN = 20
CERT_DSS_SIGNATURE_LEN = (CERT_DSS_R_LEN + CERT_DSS_S_LEN)
CERT_MAX_ASN_ENCODED_DSS_SIGNATURE_LEN = (2 + 2*(2 + 20 +1))
CRYPT_X942_COUNTER_BYTE_LENGTH = 4
CRYPT_X942_KEY_LENGTH_BYTE_LENGTH = 4
CRYPT_X942_PUB_INFO_BYTE_LENGTH = (512/8)
CRYPT_RC2_40BIT_VERSION = 160
CRYPT_RC2_56BIT_VERSION = 52
CRYPT_RC2_64BIT_VERSION = 120
CRYPT_RC2_128BIT_VERSION = 58
szOID_VERISIGN_PRIVATE_6_9 = "2.16.840.1.113733.1.6.9"
szOID_VERISIGN_ONSITE_JURISDICTION_HASH = "2.16.840.1.113733.1.6.11"
szOID_VERISIGN_BITSTRING_6_13 = "2.16.840.1.113733.1.6.13"
szOID_VERISIGN_ISS_STRONG_CRYPTO = "2.16.840.1.113733.1.8.1"
szOID_NETSCAPE = "2.16.840.1.113730"
szOID_NETSCAPE_CERT_EXTENSION = "2.16.840.1.113730.1"
szOID_NETSCAPE_CERT_TYPE = "2.16.840.1.113730.1.1"
szOID_NETSCAPE_BASE_URL = "2.16.840.1.113730.1.2"
szOID_NETSCAPE_REVOCATION_URL = "2.16.840.1.113730.1.3"
szOID_NETSCAPE_CA_REVOCATION_URL = "2.16.840.1.113730.1.4"
szOID_NETSCAPE_CERT_RENEWAL_URL = "2.16.840.1.113730.1.7"
szOID_NETSCAPE_CA_POLICY_URL = "2.16.840.1.113730.1.8"
szOID_NETSCAPE_SSL_SERVER_NAME = "2.16.840.1.113730.1.12"
szOID_NETSCAPE_COMMENT = "2.16.840.1.113730.1.13"
szOID_NETSCAPE_DATA_TYPE = "2.16.840.1.113730.2"
szOID_NETSCAPE_CERT_SEQUENCE = "2.16.840.1.113730.2.5"
NETSCAPE_SSL_CLIENT_AUTH_CERT_TYPE = 0x80
NETSCAPE_SSL_SERVER_AUTH_CERT_TYPE = 0x40
NETSCAPE_SMIME_CERT_TYPE = 0x20
NETSCAPE_SIGN_CERT_TYPE = 0x10
NETSCAPE_SSL_CA_CERT_TYPE = 0x04
NETSCAPE_SMIME_CA_CERT_TYPE = 0x02
NETSCAPE_SIGN_CA_CERT_TYPE = 0x01
szOID_CT_PKI_DATA = "1.3.6.1.5.5.7.12.2"
szOID_CT_PKI_RESPONSE = "1.3.6.1.5.5.7.12.3"
szOID_PKIX_NO_SIGNATURE = "1.3.6.1.5.5.7.6.2"
szOID_CMC = "1.3.6.1.5.5.7.7"
szOID_CMC_STATUS_INFO = "1.3.6.1.5.5.7.7.1"
szOID_CMC_IDENTIFICATION = "1.3.6.1.5.5.7.7.2"
szOID_CMC_IDENTITY_PROOF = "1.3.6.1.5.5.7.7.3"
szOID_CMC_DATA_RETURN = "1.3.6.1.5.5.7.7.4"
szOID_CMC_TRANSACTION_ID = "1.3.6.1.5.5.7.7.5"
szOID_CMC_SENDER_NONCE = "1.3.6.1.5.5.7.7.6"
szOID_CMC_RECIPIENT_NONCE = "1.3.6.1.5.5.7.7.7"
szOID_CMC_ADD_EXTENSIONS = "1.3.6.1.5.5.7.7.8"
szOID_CMC_ENCRYPTED_POP = "1.3.6.1.5.5.7.7.9"
szOID_CMC_DECRYPTED_POP = "1.3.6.1.5.5.7.7.10"
szOID_CMC_LRA_POP_WITNESS = "1.3.6.1.5.5.7.7.11"
szOID_CMC_GET_CERT = "1.3.6.1.5.5.7.7.15"
szOID_CMC_GET_CRL = "1.3.6.1.5.5.7.7.16"
szOID_CMC_REVOKE_REQUEST = "1.3.6.1.5.5.7.7.17"
szOID_CMC_REG_INFO = "1.3.6.1.5.5.7.7.18"
szOID_CMC_RESPONSE_INFO = "1.3.6.1.5.5.7.7.19"
szOID_CMC_QUERY_PENDING = "1.3.6.1.5.5.7.7.21"
szOID_CMC_ID_POP_LINK_RANDOM = "1.3.6.1.5.5.7.7.22"
szOID_CMC_ID_POP_LINK_WITNESS = "1.3.6.1.5.5.7.7.23"
szOID_CMC_ID_CONFIRM_CERT_ACCEPTANCE = "1.3.6.1.5.5.7.7.24"
szOID_CMC_ADD_ATTRIBUTES = "1.3.6.1.4.1.311.10.10.1"
CMC_TAGGED_CERT_REQUEST_CHOICE = 1
CMC_OTHER_INFO_NO_CHOICE = 0
CMC_OTHER_INFO_FAIL_CHOICE = 1
CMC_OTHER_INFO_PEND_CHOICE = 2
CMC_STATUS_SUCCESS = 0
CMC_STATUS_FAILED = 2
CMC_STATUS_PENDING = 3
CMC_STATUS_NO_SUPPORT = 4
CMC_STATUS_CONFIRM_REQUIRED = 5
CMC_FAIL_BAD_ALG = 0
CMC_FAIL_BAD_MESSAGE_CHECK = 1
CMC_FAIL_BAD_REQUEST = 2
CMC_FAIL_BAD_TIME = 3
CMC_FAIL_BAD_CERT_ID = 4
CMC_FAIL_UNSUPORTED_EXT = 5
CMC_FAIL_MUST_ARCHIVE_KEYS = 6
CMC_FAIL_BAD_IDENTITY = 7
CMC_FAIL_POP_REQUIRED = 8
CMC_FAIL_POP_FAILED = 9
CMC_FAIL_NO_KEY_REUSE = 10
CMC_FAIL_INTERNAL_CA_ERROR = 11
CMC_FAIL_TRY_LATER = 12
CRYPT_OID_ENCODE_OBJECT_FUNC = "CryptDllEncodeObject"
CRYPT_OID_DECODE_OBJECT_FUNC = "CryptDllDecodeObject"
CRYPT_OID_ENCODE_OBJECT_EX_FUNC = "CryptDllEncodeObjectEx"
CRYPT_OID_DECODE_OBJECT_EX_FUNC = "CryptDllDecodeObjectEx"
CRYPT_OID_CREATE_COM_OBJECT_FUNC = "CryptDllCreateCOMObject"
CRYPT_OID_VERIFY_REVOCATION_FUNC = "CertDllVerifyRevocation"
CRYPT_OID_VERIFY_CTL_USAGE_FUNC = "CertDllVerifyCTLUsage"
CRYPT_OID_FORMAT_OBJECT_FUNC = "CryptDllFormatObject"
CRYPT_OID_FIND_OID_INFO_FUNC = "CryptDllFindOIDInfo"
CRYPT_OID_FIND_LOCALIZED_NAME_FUNC = "CryptDllFindLocalizedName"
CRYPT_OID_REGPATH = "Software\\Microsoft\\Cryptography\\OID"
CRYPT_OID_REG_ENCODING_TYPE_PREFIX = "EncodingType "
CRYPT_OID_REG_DLL_VALUE_NAME = u"Dll"
CRYPT_OID_REG_FUNC_NAME_VALUE_NAME = u"FuncName"
CRYPT_OID_REG_FUNC_NAME_VALUE_NAME_A = "FuncName"
CRYPT_OID_REG_FLAGS_VALUE_NAME = u"CryptFlags"
CRYPT_DEFAULT_OID = "DEFAULT"
CRYPT_INSTALL_OID_FUNC_BEFORE_FLAG = 1
CRYPT_GET_INSTALLED_OID_FUNC_FLAG = 0x1
CRYPT_REGISTER_FIRST_INDEX = 0
CRYPT_REGISTER_LAST_INDEX = (-1)
CRYPT_MATCH_ANY_ENCODING_TYPE = (-1)
CRYPT_HASH_ALG_OID_GROUP_ID = 1
CRYPT_ENCRYPT_ALG_OID_GROUP_ID = 2
CRYPT_PUBKEY_ALG_OID_GROUP_ID = 3
CRYPT_SIGN_ALG_OID_GROUP_ID = 4
CRYPT_RDN_ATTR_OID_GROUP_ID = 5
CRYPT_EXT_OR_ATTR_OID_GROUP_ID = 6
CRYPT_ENHKEY_USAGE_OID_GROUP_ID = 7
CRYPT_POLICY_OID_GROUP_ID = 8
CRYPT_TEMPLATE_OID_GROUP_ID = 9
CRYPT_LAST_OID_GROUP_ID = 9
CRYPT_FIRST_ALG_OID_GROUP_ID = CRYPT_HASH_ALG_OID_GROUP_ID
CRYPT_LAST_ALG_OID_GROUP_ID = CRYPT_SIGN_ALG_OID_GROUP_ID
CRYPT_OID_INHIBIT_SIGNATURE_FORMAT_FLAG = 0x1
CRYPT_OID_USE_PUBKEY_PARA_FOR_PKCS7_FLAG = 0x2
CRYPT_OID_NO_NULL_ALGORITHM_PARA_FLAG = 0x4
CRYPT_OID_INFO_OID_KEY = 1
CRYPT_OID_INFO_NAME_KEY = 2
CRYPT_OID_INFO_ALGID_KEY = 3
CRYPT_OID_INFO_SIGN_KEY = 4
CRYPT_INSTALL_OID_INFO_BEFORE_FLAG = 1
CRYPT_LOCALIZED_NAME_ENCODING_TYPE = 0
CRYPT_LOCALIZED_NAME_OID = "LocalizedNames"
szOID_PKCS_7_DATA = "1.2.840.113549.1.7.1"
szOID_PKCS_7_SIGNED = "1.2.840.113549.1.7.2"
szOID_PKCS_7_ENVELOPED = "1.2.840.113549.1.7.3"
szOID_PKCS_7_SIGNEDANDENVELOPED = "1.2.840.113549.1.7.4"
szOID_PKCS_7_DIGESTED = "1.2.840.113549.1.7.5"
szOID_PKCS_7_ENCRYPTED = "1.2.840.113549.1.7.6"
szOID_PKCS_9_CONTENT_TYPE = "1.2.840.113549.1.9.3"
szOID_PKCS_9_MESSAGE_DIGEST = "1.2.840.113549.1.9.4"
CMSG_DATA = 1
CMSG_SIGNED = 2
CMSG_ENVELOPED = 3
CMSG_SIGNED_AND_ENVELOPED = 4
CMSG_HASHED = 5
CMSG_ENCRYPTED = 6
CMSG_ALL_FLAGS = -1
CMSG_DATA_FLAG = (1 << CMSG_DATA)
CMSG_SIGNED_FLAG = (1 << CMSG_SIGNED)
CMSG_ENVELOPED_FLAG = (1 << CMSG_ENVELOPED)
CMSG_SIGNED_AND_ENVELOPED_FLAG = (1 << CMSG_SIGNED_AND_ENVELOPED)
CMSG_HASHED_FLAG = (1 << CMSG_HASHED)
CMSG_ENCRYPTED_FLAG = (1 << CMSG_ENCRYPTED)
CERT_ID_ISSUER_SERIAL_NUMBER = 1
CERT_ID_KEY_IDENTIFIER = 2
CERT_ID_SHA1_HASH = 3
CMSG_KEY_AGREE_EPHEMERAL_KEY_CHOICE = 1
CMSG_KEY_AGREE_STATIC_KEY_CHOICE = 2
CMSG_MAIL_LIST_HANDLE_KEY_CHOICE = 1
CMSG_KEY_TRANS_RECIPIENT = 1
CMSG_KEY_AGREE_RECIPIENT = 2
CMSG_MAIL_LIST_RECIPIENT = 3
CMSG_SP3_COMPATIBLE_ENCRYPT_FLAG = (-2147483648)
CMSG_RC4_NO_SALT_FLAG = 0x40000000
CMSG_INDEFINITE_LENGTH = ((-1))
CMSG_BARE_CONTENT_FLAG = 0x00000001
CMSG_LENGTH_ONLY_FLAG = 0x00000002
CMSG_DETACHED_FLAG = 0x00000004
CMSG_AUTHENTICATED_ATTRIBUTES_FLAG = 0x00000008
CMSG_CONTENTS_OCTETS_FLAG = 0x00000010
CMSG_MAX_LENGTH_FLAG = 0x00000020
CMSG_CMS_ENCAPSULATED_CONTENT_FLAG = 0x00000040
CMSG_CRYPT_RELEASE_CONTEXT_FLAG = 0x00008000
CMSG_TYPE_PARAM = 1
CMSG_CONTENT_PARAM = 2
CMSG_BARE_CONTENT_PARAM = 3
CMSG_INNER_CONTENT_TYPE_PARAM = 4
CMSG_SIGNER_COUNT_PARAM = 5
CMSG_SIGNER_INFO_PARAM = 6
CMSG_SIGNER_CERT_INFO_PARAM = 7
CMSG_SIGNER_HASH_ALGORITHM_PARAM = 8
CMSG_SIGNER_AUTH_ATTR_PARAM = 9
CMSG_SIGNER_UNAUTH_ATTR_PARAM = 10
CMSG_CERT_COUNT_PARAM = 11
CMSG_CERT_PARAM = 12
CMSG_CRL_COUNT_PARAM = 13
CMSG_CRL_PARAM = 14
CMSG_ENVELOPE_ALGORITHM_PARAM = 15
CMSG_RECIPIENT_COUNT_PARAM = 17
CMSG_RECIPIENT_INDEX_PARAM = 18
CMSG_RECIPIENT_INFO_PARAM = 19
CMSG_HASH_ALGORITHM_PARAM = 20
CMSG_HASH_DATA_PARAM = 21
CMSG_COMPUTED_HASH_PARAM = 22
CMSG_ENCRYPT_PARAM = 26
CMSG_ENCRYPTED_DIGEST = 27
CMSG_ENCODED_SIGNER = 28
CMSG_ENCODED_MESSAGE = 29
CMSG_VERSION_PARAM = 30
CMSG_ATTR_CERT_COUNT_PARAM = 31
CMSG_ATTR_CERT_PARAM = 32
CMSG_CMS_RECIPIENT_COUNT_PARAM = 33
CMSG_CMS_RECIPIENT_INDEX_PARAM = 34
CMSG_CMS_RECIPIENT_ENCRYPTED_KEY_INDEX_PARAM = 35
CMSG_CMS_RECIPIENT_INFO_PARAM = 36
CMSG_UNPROTECTED_ATTR_PARAM = 37
CMSG_SIGNER_CERT_ID_PARAM = 38
CMSG_CMS_SIGNER_INFO_PARAM = 39
CMSG_SIGNED_DATA_V1 = 1
CMSG_SIGNED_DATA_V3 = 3
CMSG_SIGNED_DATA_PKCS_1_5_VERSION = CMSG_SIGNED_DATA_V1
CMSG_SIGNED_DATA_CMS_VERSION = CMSG_SIGNED_DATA_V3
CMSG_SIGNER_INFO_V1 = 1
CMSG_SIGNER_INFO_V3 = 3
CMSG_SIGNER_INFO_PKCS_1_5_VERSION = CMSG_SIGNER_INFO_V1
CMSG_SIGNER_INFO_CMS_VERSION = CMSG_SIGNER_INFO_V3
CMSG_HASHED_DATA_V0 = 0
CMSG_HASHED_DATA_V2 = 2
CMSG_HASHED_DATA_PKCS_1_5_VERSION = CMSG_HASHED_DATA_V0
CMSG_HASHED_DATA_CMS_VERSION = CMSG_HASHED_DATA_V2
CMSG_ENVELOPED_DATA_V0 = 0
CMSG_ENVELOPED_DATA_V2 = 2
CMSG_ENVELOPED_DATA_PKCS_1_5_VERSION = CMSG_ENVELOPED_DATA_V0
CMSG_ENVELOPED_DATA_CMS_VERSION = CMSG_ENVELOPED_DATA_V2
CMSG_KEY_AGREE_ORIGINATOR_CERT = 1
CMSG_KEY_AGREE_ORIGINATOR_PUBLIC_KEY = 2
CMSG_ENVELOPED_RECIPIENT_V0 = 0
CMSG_ENVELOPED_RECIPIENT_V2 = 2
CMSG_ENVELOPED_RECIPIENT_V3 = 3
CMSG_ENVELOPED_RECIPIENT_V4 = 4
CMSG_KEY_TRANS_PKCS_1_5_VERSION = CMSG_ENVELOPED_RECIPIENT_V0
CMSG_KEY_TRANS_CMS_VERSION = CMSG_ENVELOPED_RECIPIENT_V2
CMSG_KEY_AGREE_VERSION = CMSG_ENVELOPED_RECIPIENT_V3
CMSG_MAIL_LIST_VERSION = CMSG_ENVELOPED_RECIPIENT_V4
CMSG_CTRL_VERIFY_SIGNATURE = 1
CMSG_CTRL_DECRYPT = 2
CMSG_CTRL_VERIFY_HASH = 5
CMSG_CTRL_ADD_SIGNER = 6
CMSG_CTRL_DEL_SIGNER = 7
CMSG_CTRL_ADD_SIGNER_UNAUTH_ATTR = 8
CMSG_CTRL_DEL_SIGNER_UNAUTH_ATTR = 9
CMSG_CTRL_ADD_CERT = 10
CMSG_CTRL_DEL_CERT = 11
CMSG_CTRL_ADD_CRL = 12
CMSG_CTRL_DEL_CRL = 13
CMSG_CTRL_ADD_ATTR_CERT = 14
CMSG_CTRL_DEL_ATTR_CERT = 15
CMSG_CTRL_KEY_TRANS_DECRYPT = 16
CMSG_CTRL_KEY_AGREE_DECRYPT = 17
CMSG_CTRL_MAIL_LIST_DECRYPT = 18
CMSG_CTRL_VERIFY_SIGNATURE_EX = 19
CMSG_CTRL_ADD_CMS_SIGNER_INFO = 20
CMSG_VERIFY_SIGNER_PUBKEY = 1
CMSG_VERIFY_SIGNER_CERT = 2
CMSG_VERIFY_SIGNER_CHAIN = 3
CMSG_VERIFY_SIGNER_NULL = 4
CMSG_OID_GEN_ENCRYPT_KEY_FUNC = "CryptMsgDllGenEncryptKey"
CMSG_OID_EXPORT_ENCRYPT_KEY_FUNC = "CryptMsgDllExportEncryptKey"
CMSG_OID_IMPORT_ENCRYPT_KEY_FUNC = "CryptMsgDllImportEncryptKey"
CMSG_CONTENT_ENCRYPT_PAD_ENCODED_LEN_FLAG = 0x00000001
CMSG_DEFAULT_INSTALLABLE_FUNC_OID = 1
CMSG_CONTENT_ENCRYPT_FREE_PARA_FLAG = 0x00000001
CMSG_CONTENT_ENCRYPT_RELEASE_CONTEXT_FLAG = 0x00008000
CMSG_OID_GEN_CONTENT_ENCRYPT_KEY_FUNC = "CryptMsgDllGenContentEncryptKey"
CMSG_KEY_TRANS_ENCRYPT_FREE_PARA_FLAG = 0x00000001
CMSG_OID_EXPORT_KEY_TRANS_FUNC = "CryptMsgDllExportKeyTrans"
CMSG_KEY_AGREE_ENCRYPT_FREE_PARA_FLAG = 0x00000001
CMSG_KEY_AGREE_ENCRYPT_FREE_MATERIAL_FLAG = 0x00000002
CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_ALG_FLAG = 0x00000004
CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_PARA_FLAG = 0x00000008
CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_BITS_FLAG = 0x00000010
CMSG_OID_EXPORT_KEY_AGREE_FUNC = "CryptMsgDllExportKeyAgree"
CMSG_MAIL_LIST_ENCRYPT_FREE_PARA_FLAG = 0x00000001
CMSG_OID_EXPORT_MAIL_LIST_FUNC = "CryptMsgDllExportMailList"
CMSG_OID_IMPORT_KEY_TRANS_FUNC = "CryptMsgDllImportKeyTrans"
CMSG_OID_IMPORT_KEY_AGREE_FUNC = "CryptMsgDllImportKeyAgree"
CMSG_OID_IMPORT_MAIL_LIST_FUNC = "CryptMsgDllImportMailList"
# Certificate property id's used with CertGetCertificateContextProperty
CERT_KEY_PROV_HANDLE_PROP_ID = 1
CERT_KEY_PROV_INFO_PROP_ID = 2
CERT_SHA1_HASH_PROP_ID = 3
CERT_MD5_HASH_PROP_ID = 4
CERT_HASH_PROP_ID = CERT_SHA1_HASH_PROP_ID
CERT_KEY_CONTEXT_PROP_ID = 5
CERT_KEY_SPEC_PROP_ID = 6
CERT_IE30_RESERVED_PROP_ID = 7
CERT_PUBKEY_HASH_RESERVED_PROP_ID = 8
CERT_ENHKEY_USAGE_PROP_ID = 9
CERT_CTL_USAGE_PROP_ID = CERT_ENHKEY_USAGE_PROP_ID
CERT_NEXT_UPDATE_LOCATION_PROP_ID = 10
CERT_FRIENDLY_NAME_PROP_ID = 11
CERT_PVK_FILE_PROP_ID = 12
CERT_DESCRIPTION_PROP_ID = 13
CERT_ACCESS_STATE_PROP_ID = 14
CERT_SIGNATURE_HASH_PROP_ID = 15
CERT_SMART_CARD_DATA_PROP_ID = 16
CERT_EFS_PROP_ID = 17
CERT_FORTEZZA_DATA_PROP_ID = 18
CERT_ARCHIVED_PROP_ID = 19
CERT_KEY_IDENTIFIER_PROP_ID = 20
CERT_AUTO_ENROLL_PROP_ID = 21
CERT_PUBKEY_ALG_PARA_PROP_ID = 22
CERT_CROSS_CERT_DIST_POINTS_PROP_ID = 23
CERT_ISSUER_PUBLIC_KEY_MD5_HASH_PROP_ID = 24
CERT_SUBJECT_PUBLIC_KEY_MD5_HASH_PROP_ID = 25
CERT_ENROLLMENT_PROP_ID = 26
CERT_DATE_STAMP_PROP_ID = 27
CERT_ISSUER_SERIAL_NUMBER_MD5_HASH_PROP_ID = 28
CERT_SUBJECT_NAME_MD5_HASH_PROP_ID = 29
CERT_EXTENDED_ERROR_INFO_PROP_ID = 30
CERT_RENEWAL_PROP_ID = 64
CERT_ARCHIVED_KEY_HASH_PROP_ID = 65
CERT_AUTO_ENROLL_RETRY_PROP_ID = 66
CERT_AIA_URL_RETRIEVED_PROP_ID = 67
CERT_AUTHORITY_INFO_ACCESS_PROP_ID = 68
CERT_BACKED_UP_PROP_ID = 69
CERT_OCSP_RESPONSE_PROP_ID = 70
CERT_REQUEST_ORIGINATOR_PROP_ID = 71
CERT_SOURCE_LOCATION_PROP_ID = 72
CERT_SOURCE_URL_PROP_ID = 73
CERT_NEW_KEY_PROP_ID = 74
CERT_OCSP_CACHE_PREFIX_PROP_ID = 75
CERT_SMART_CARD_ROOT_INFO_PROP_ID = 76
CERT_NO_AUTO_EXPIRE_CHECK_PROP_ID = 77
CERT_NCRYPT_KEY_HANDLE_PROP_ID = 78
CERT_HCRYPTPROV_OR_NCRYPT_KEY_HANDLE_PROP_ID = 79
CERT_SUBJECT_INFO_ACCESS_PROP_ID = 80
CERT_CA_OCSP_AUTHORITY_INFO_ACCESS_PROP_ID = 81
CERT_CA_DISABLE_CRL_PROP_ID = 82
CERT_ROOT_PROGRAM_CERT_POLICIES_PROP_ID = 83
CERT_ROOT_PROGRAM_NAME_CONSTRAINTS_PROP_ID = 84
CERT_SUBJECT_OCSP_AUTHORITY_INFO_ACCESS_PROP_ID = 85
CERT_SUBJECT_DISABLE_CRL_PROP_ID = 86
CERT_CEP_PROP_ID = 87
CERT_SIGN_HASH_CNG_ALG_PROP_ID = 89
CERT_SCARD_PIN_ID_PROP_ID = 90
CERT_SCARD_PIN_INFO_PROP_ID = 91
CERT_FIRST_RESERVED_PROP_ID = 92
CERT_LAST_RESERVED_PROP_ID = 0x00007FFF
CERT_FIRST_USER_PROP_ID = 0x00008000
CERT_LAST_USER_PROP_ID = 0x0000FFFF
szOID_CERT_PROP_ID_PREFIX = "1.3.6.1.4.1.311.10.11."
szOID_CERT_KEY_IDENTIFIER_PROP_ID = "1.3.6.1.4.1.311.10.11.20"
szOID_CERT_ISSUER_SERIAL_NUMBER_MD5_HASH_PROP_ID = \
"1.3.6.1.4.1.311.10.11.28"
szOID_CERT_SUBJECT_NAME_MD5_HASH_PROP_ID = \
"1.3.6.1.4.1.311.10.11.29"
CERT_ACCESS_STATE_WRITE_PERSIST_FLAG = 0x1
CERT_ACCESS_STATE_SYSTEM_STORE_FLAG = 0x2
CERT_ACCESS_STATE_LM_SYSTEM_STORE_FLAG = 0x4
CERT_SET_KEY_PROV_HANDLE_PROP_ID = 0x00000001
CERT_SET_KEY_CONTEXT_PROP_ID = 0x00000001
sz_CERT_STORE_PROV_MEMORY = "Memory"
sz_CERT_STORE_PROV_FILENAME_W = "File"
sz_CERT_STORE_PROV_FILENAME = sz_CERT_STORE_PROV_FILENAME_W
sz_CERT_STORE_PROV_SYSTEM_W = "System"
sz_CERT_STORE_PROV_SYSTEM = sz_CERT_STORE_PROV_SYSTEM_W
sz_CERT_STORE_PROV_PKCS7 = "PKCS7"
sz_CERT_STORE_PROV_SERIALIZED = "Serialized"
sz_CERT_STORE_PROV_COLLECTION = "Collection"
sz_CERT_STORE_PROV_SYSTEM_REGISTRY_W = "SystemRegistry"
sz_CERT_STORE_PROV_SYSTEM_REGISTRY = sz_CERT_STORE_PROV_SYSTEM_REGISTRY_W
sz_CERT_STORE_PROV_PHYSICAL_W = "Physical"
sz_CERT_STORE_PROV_PHYSICAL = sz_CERT_STORE_PROV_PHYSICAL_W
sz_CERT_STORE_PROV_SMART_CARD_W = "SmartCard"
sz_CERT_STORE_PROV_SMART_CARD = sz_CERT_STORE_PROV_SMART_CARD_W
sz_CERT_STORE_PROV_LDAP_W = "Ldap"
sz_CERT_STORE_PROV_LDAP = sz_CERT_STORE_PROV_LDAP_W
CERT_STORE_SIGNATURE_FLAG = 0x00000001
CERT_STORE_TIME_VALIDITY_FLAG = 0x00000002
CERT_STORE_REVOCATION_FLAG = 0x00000004
CERT_STORE_NO_CRL_FLAG = 0x00010000
CERT_STORE_NO_ISSUER_FLAG = 0x00020000
CERT_STORE_BASE_CRL_FLAG = 0x00000100
CERT_STORE_DELTA_CRL_FLAG = 0x00000200
CERT_STORE_NO_CRYPT_RELEASE_FLAG = 0x00000001
CERT_STORE_SET_LOCALIZED_NAME_FLAG = 0x00000002
CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG = 0x00000004
CERT_STORE_DELETE_FLAG = 0x00000010
CERT_STORE_UNSAFE_PHYSICAL_FLAG = 0x00000020
CERT_STORE_SHARE_STORE_FLAG = 0x00000040
CERT_STORE_SHARE_CONTEXT_FLAG = 0x00000080
CERT_STORE_MANIFOLD_FLAG = 0x00000100
CERT_STORE_ENUM_ARCHIVED_FLAG = 0x00000200
CERT_STORE_UPDATE_KEYID_FLAG = 0x00000400
CERT_STORE_BACKUP_RESTORE_FLAG = 0x00000800
CERT_STORE_READONLY_FLAG = 0x00008000
CERT_STORE_OPEN_EXISTING_FLAG = 0x00004000
CERT_STORE_CREATE_NEW_FLAG = 0x00002000
CERT_STORE_MAXIMUM_ALLOWED_FLAG = 0x00001000
CERT_SYSTEM_STORE_MASK = (-65536)
CERT_SYSTEM_STORE_RELOCATE_FLAG = (-2147483648)
CERT_SYSTEM_STORE_UNPROTECTED_FLAG = 0x40000000
CERT_SYSTEM_STORE_LOCATION_MASK = 0x00FF0000
CERT_SYSTEM_STORE_LOCATION_SHIFT = 16
CERT_SYSTEM_STORE_CURRENT_USER_ID = 1
CERT_SYSTEM_STORE_LOCAL_MACHINE_ID = 2
CERT_SYSTEM_STORE_CURRENT_SERVICE_ID = 4
CERT_SYSTEM_STORE_SERVICES_ID = 5
CERT_SYSTEM_STORE_USERS_ID = 6
CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY_ID = 7
CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY_ID = 8
CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE_ID = 9
CERT_SYSTEM_STORE_CURRENT_USER = \
(CERT_SYSTEM_STORE_CURRENT_USER_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT)
CERT_SYSTEM_STORE_LOCAL_MACHINE = \
(CERT_SYSTEM_STORE_LOCAL_MACHINE_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT)
CERT_SYSTEM_STORE_CURRENT_SERVICE = \
(CERT_SYSTEM_STORE_CURRENT_SERVICE_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT)
CERT_SYSTEM_STORE_SERVICES = \
(CERT_SYSTEM_STORE_SERVICES_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT)
CERT_SYSTEM_STORE_USERS = \
(CERT_SYSTEM_STORE_USERS_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT)
CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY = \
(CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY_ID << \
CERT_SYSTEM_STORE_LOCATION_SHIFT)
CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY = \
(CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY_ID << \
CERT_SYSTEM_STORE_LOCATION_SHIFT)
CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE = \
(CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE_ID << \
CERT_SYSTEM_STORE_LOCATION_SHIFT)
CERT_PROT_ROOT_DISABLE_CURRENT_USER_FLAG = 0x1
CERT_PROT_ROOT_INHIBIT_ADD_AT_INIT_FLAG = 0x2
CERT_PROT_ROOT_INHIBIT_PURGE_LM_FLAG = 0x4
CERT_PROT_ROOT_DISABLE_LM_AUTH_FLAG = 0x8
CERT_PROT_ROOT_ONLY_LM_GPT_FLAG = 0x8
CERT_PROT_ROOT_DISABLE_NT_AUTH_REQUIRED_FLAG = 0x10
CERT_PROT_ROOT_DISABLE_NOT_DEFINED_NAME_CONSTRAINT_FLAG = 0x20
CERT_TRUST_PUB_ALLOW_TRUST_MASK = 0x00000003
CERT_TRUST_PUB_ALLOW_END_USER_TRUST = 0x00000000
CERT_TRUST_PUB_ALLOW_MACHINE_ADMIN_TRUST = 0x00000001
CERT_TRUST_PUB_ALLOW_ENTERPRISE_ADMIN_TRUST = 0x00000002
CERT_TRUST_PUB_CHECK_PUBLISHER_REV_FLAG = 0x00000100
CERT_TRUST_PUB_CHECK_TIMESTAMP_REV_FLAG = 0x00000200
CERT_AUTH_ROOT_AUTO_UPDATE_LOCAL_MACHINE_REGPATH = ur"Software\Microsoft\SystemCertificates\AuthRoot\AutoUpdate"
CERT_AUTH_ROOT_AUTO_UPDATE_DISABLE_UNTRUSTED_ROOT_LOGGING_FLAG = 0x1
CERT_AUTH_ROOT_AUTO_UPDATE_DISABLE_PARTIAL_CHAIN_LOGGING_FLAG = 0x2
CERT_AUTH_ROOT_AUTO_UPDATE_ROOT_DIR_URL_VALUE_NAME = u"RootDirUrl"
CERT_AUTH_ROOT_AUTO_UPDATE_SYNC_DELTA_TIME_VALUE_NAME = u"SyncDeltaTime"
CERT_AUTH_ROOT_AUTO_UPDATE_FLAGS_VALUE_NAME = u"Flags"
CERT_AUTH_ROOT_CTL_FILENAME = u"authroot.stl"
CERT_AUTH_ROOT_CTL_FILENAME_A = "authroot.stl"
CERT_AUTH_ROOT_CAB_FILENAME = u"authrootstl.cab"
CERT_AUTH_ROOT_SEQ_FILENAME = "authrootseq.txt"
CERT_AUTH_ROOT_CERT_EXT = ".crt"
CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH = ur"Software\Policies\Microsoft\SystemCertificates"
CERT_EFSBLOB_REGPATH = CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH + ur"\EFS"
CERT_EFSBLOB_VALUE_NAME = u"EFSBlob"
CERT_PROT_ROOT_FLAGS_REGPATH = CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH +ur"\Root\ProtectedRoots"
CERT_PROT_ROOT_FLAGS_VALUE_NAME = u"Flags"
CERT_TRUST_PUB_SAFER_GROUP_POLICY_REGPATH = CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH + ur"\TrustedPublisher\Safer"
CERT_LOCAL_MACHINE_SYSTEM_STORE_REGPATH = ur"Software\Microsoft\SystemCertificates"
CERT_TRUST_PUB_SAFER_LOCAL_MACHINE_REGPATH = CERT_LOCAL_MACHINE_SYSTEM_STORE_REGPATH + ur"\TrustedPublisher\Safer"
CERT_TRUST_PUB_AUTHENTICODE_FLAGS_VALUE_NAME = u"AuthenticodeFlags"
CERT_OCM_SUBCOMPONENTS_LOCAL_MACHINE_REGPATH = ur"SOFTWARE\Microsoft\Windows\CurrentVersion\Setup\OC Manager\Subcomponents"
CERT_OCM_SUBCOMPONENTS_ROOT_AUTO_UPDATE_VALUE_NAME = ur"RootAutoUpdate"
CERT_DISABLE_ROOT_AUTO_UPDATE_REGPATH = CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH + ur"\AuthRoot"
CERT_DISABLE_ROOT_AUTO_UPDATE_VALUE_NAME = u"DisableRootAutoUpdate"
CERT_AUTH_ROOT_AUTO_UPDATE_LOCAL_MACHINE_REGPATH = CERT_LOCAL_MACHINE_SYSTEM_STORE_REGPATH + ur"\AuthRoot\AutoUpdate"
CERT_REGISTRY_STORE_REMOTE_FLAG = 0x10000
CERT_REGISTRY_STORE_SERIALIZED_FLAG = 0x20000
CERT_REGISTRY_STORE_CLIENT_GPT_FLAG = (-2147483648)
CERT_REGISTRY_STORE_LM_GPT_FLAG = 0x01000000
CERT_REGISTRY_STORE_ROAMING_FLAG = 0x40000
CERT_REGISTRY_STORE_MY_IE_DIRTY_FLAG = 0x80000
CERT_IE_DIRTY_FLAGS_REGPATH = ur"Software\Microsoft\Cryptography\IEDirtyFlags"
CERT_FILE_STORE_COMMIT_ENABLE_FLAG = 0x10000
CERT_LDAP_STORE_SIGN_FLAG = 0x10000
CERT_LDAP_STORE_AREC_EXCLUSIVE_FLAG = 0x20000
CERT_LDAP_STORE_OPENED_FLAG = 0x40000
CERT_LDAP_STORE_UNBIND_FLAG = 0x80000
CRYPT_OID_OPEN_STORE_PROV_FUNC = "CertDllOpenStoreProv"
CERT_STORE_PROV_EXTERNAL_FLAG = 0x1
CERT_STORE_PROV_DELETED_FLAG = 0x2
CERT_STORE_PROV_NO_PERSIST_FLAG = 0x4
CERT_STORE_PROV_SYSTEM_STORE_FLAG = 0x8
CERT_STORE_PROV_LM_SYSTEM_STORE_FLAG = 0x10
CERT_STORE_PROV_CLOSE_FUNC = 0
CERT_STORE_PROV_READ_CERT_FUNC = 1
CERT_STORE_PROV_WRITE_CERT_FUNC = 2
CERT_STORE_PROV_DELETE_CERT_FUNC = 3
CERT_STORE_PROV_SET_CERT_PROPERTY_FUNC = 4
CERT_STORE_PROV_READ_CRL_FUNC = 5
CERT_STORE_PROV_WRITE_CRL_FUNC = 6
CERT_STORE_PROV_DELETE_CRL_FUNC = 7
CERT_STORE_PROV_SET_CRL_PROPERTY_FUNC = 8
CERT_STORE_PROV_READ_CTL_FUNC = 9
CERT_STORE_PROV_WRITE_CTL_FUNC = 10
CERT_STORE_PROV_DELETE_CTL_FUNC = 11
CERT_STORE_PROV_SET_CTL_PROPERTY_FUNC = 12
CERT_STORE_PROV_CONTROL_FUNC = 13
CERT_STORE_PROV_FIND_CERT_FUNC = 14
CERT_STORE_PROV_FREE_FIND_CERT_FUNC = 15
CERT_STORE_PROV_GET_CERT_PROPERTY_FUNC = 16
CERT_STORE_PROV_FIND_CRL_FUNC = 17
CERT_STORE_PROV_FREE_FIND_CRL_FUNC = 18
CERT_STORE_PROV_GET_CRL_PROPERTY_FUNC = 19
CERT_STORE_PROV_FIND_CTL_FUNC = 20
CERT_STORE_PROV_FREE_FIND_CTL_FUNC = 21
CERT_STORE_PROV_GET_CTL_PROPERTY_FUNC = 22
CERT_STORE_PROV_WRITE_ADD_FLAG = 0x1
CERT_STORE_SAVE_AS_STORE = 1
CERT_STORE_SAVE_AS_PKCS7 = 2
CERT_STORE_SAVE_TO_FILE = 1
CERT_STORE_SAVE_TO_MEMORY = 2
CERT_STORE_SAVE_TO_FILENAME_A = 3
CERT_STORE_SAVE_TO_FILENAME_W = 4
CERT_STORE_SAVE_TO_FILENAME = CERT_STORE_SAVE_TO_FILENAME_W
CERT_CLOSE_STORE_FORCE_FLAG = 0x00000001
CERT_CLOSE_STORE_CHECK_FLAG = 0x00000002
CERT_COMPARE_MASK = 0xFFFF
CERT_COMPARE_SHIFT = 16
CERT_COMPARE_ANY = 0
CERT_COMPARE_SHA1_HASH = 1
CERT_COMPARE_NAME = 2
CERT_COMPARE_ATTR = 3
CERT_COMPARE_MD5_HASH = 4
CERT_COMPARE_PROPERTY = 5
CERT_COMPARE_PUBLIC_KEY = 6
CERT_COMPARE_HASH = CERT_COMPARE_SHA1_HASH
CERT_COMPARE_NAME_STR_A = 7
CERT_COMPARE_NAME_STR_W = 8
CERT_COMPARE_KEY_SPEC = 9
CERT_COMPARE_ENHKEY_USAGE = 10
CERT_COMPARE_CTL_USAGE = CERT_COMPARE_ENHKEY_USAGE
CERT_COMPARE_SUBJECT_CERT = 11
CERT_COMPARE_ISSUER_OF = 12
CERT_COMPARE_EXISTING = 13
CERT_COMPARE_SIGNATURE_HASH = 14
CERT_COMPARE_KEY_IDENTIFIER = 15
CERT_COMPARE_CERT_ID = 16
CERT_COMPARE_CROSS_CERT_DIST_POINTS = 17
CERT_COMPARE_PUBKEY_MD5_HASH = 18
CERT_FIND_ANY = (CERT_COMPARE_ANY << CERT_COMPARE_SHIFT)
CERT_FIND_SHA1_HASH = (CERT_COMPARE_SHA1_HASH << CERT_COMPARE_SHIFT)
CERT_FIND_MD5_HASH = (CERT_COMPARE_MD5_HASH << CERT_COMPARE_SHIFT)
CERT_FIND_SIGNATURE_HASH = (CERT_COMPARE_SIGNATURE_HASH << CERT_COMPARE_SHIFT)
CERT_FIND_KEY_IDENTIFIER = (CERT_COMPARE_KEY_IDENTIFIER << CERT_COMPARE_SHIFT)
CERT_FIND_HASH = CERT_FIND_SHA1_HASH
CERT_FIND_PROPERTY = (CERT_COMPARE_PROPERTY << CERT_COMPARE_SHIFT)
CERT_FIND_PUBLIC_KEY = (CERT_COMPARE_PUBLIC_KEY << CERT_COMPARE_SHIFT)
CERT_FIND_SUBJECT_NAME = (CERT_COMPARE_NAME << CERT_COMPARE_SHIFT | \
CERT_INFO_SUBJECT_FLAG)
CERT_FIND_SUBJECT_ATTR = (CERT_COMPARE_ATTR << CERT_COMPARE_SHIFT | \
CERT_INFO_SUBJECT_FLAG)
CERT_FIND_ISSUER_NAME = (CERT_COMPARE_NAME << CERT_COMPARE_SHIFT | \
CERT_INFO_ISSUER_FLAG)
CERT_FIND_ISSUER_ATTR = (CERT_COMPARE_ATTR << CERT_COMPARE_SHIFT | \
CERT_INFO_ISSUER_FLAG)
CERT_FIND_SUBJECT_STR_A = (CERT_COMPARE_NAME_STR_A << CERT_COMPARE_SHIFT | \
CERT_INFO_SUBJECT_FLAG)
CERT_FIND_SUBJECT_STR_W = (CERT_COMPARE_NAME_STR_W << CERT_COMPARE_SHIFT | \
CERT_INFO_SUBJECT_FLAG)
CERT_FIND_SUBJECT_STR = CERT_FIND_SUBJECT_STR_W
CERT_FIND_ISSUER_STR_A = (CERT_COMPARE_NAME_STR_A << CERT_COMPARE_SHIFT | \
CERT_INFO_ISSUER_FLAG)
CERT_FIND_ISSUER_STR_W = (CERT_COMPARE_NAME_STR_W << CERT_COMPARE_SHIFT | \
CERT_INFO_ISSUER_FLAG)
CERT_FIND_ISSUER_STR = CERT_FIND_ISSUER_STR_W
CERT_FIND_KEY_SPEC = (CERT_COMPARE_KEY_SPEC << CERT_COMPARE_SHIFT)
CERT_FIND_ENHKEY_USAGE = (CERT_COMPARE_ENHKEY_USAGE << CERT_COMPARE_SHIFT)
CERT_FIND_CTL_USAGE = CERT_FIND_ENHKEY_USAGE
CERT_FIND_SUBJECT_CERT = (CERT_COMPARE_SUBJECT_CERT << CERT_COMPARE_SHIFT)
CERT_FIND_ISSUER_OF = (CERT_COMPARE_ISSUER_OF << CERT_COMPARE_SHIFT)
CERT_FIND_EXISTING = (CERT_COMPARE_EXISTING << CERT_COMPARE_SHIFT)
CERT_FIND_CERT_ID = (CERT_COMPARE_CERT_ID << CERT_COMPARE_SHIFT)
CERT_FIND_CROSS_CERT_DIST_POINTS = \
(CERT_COMPARE_CROSS_CERT_DIST_POINTS << CERT_COMPARE_SHIFT)
CERT_FIND_PUBKEY_MD5_HASH = \
(CERT_COMPARE_PUBKEY_MD5_HASH << CERT_COMPARE_SHIFT)
CERT_FIND_OPTIONAL_ENHKEY_USAGE_FLAG = 0x1
CERT_FIND_EXT_ONLY_ENHKEY_USAGE_FLAG = 0x2
CERT_FIND_PROP_ONLY_ENHKEY_USAGE_FLAG = 0x4
CERT_FIND_NO_ENHKEY_USAGE_FLAG = 0x8
CERT_FIND_OR_ENHKEY_USAGE_FLAG = 0x10
CERT_FIND_VALID_ENHKEY_USAGE_FLAG = 0x20
CERT_FIND_OPTIONAL_CTL_USAGE_FLAG = CERT_FIND_OPTIONAL_ENHKEY_USAGE_FLAG
CERT_FIND_EXT_ONLY_CTL_USAGE_FLAG = \
CERT_FIND_EXT_ONLY_ENHKEY_USAGE_FLAG
CERT_FIND_PROP_ONLY_CTL_USAGE_FLAG = \
CERT_FIND_PROP_ONLY_ENHKEY_USAGE_FLAG
CERT_FIND_NO_CTL_USAGE_FLAG = CERT_FIND_NO_ENHKEY_USAGE_FLAG
CERT_FIND_OR_CTL_USAGE_FLAG = CERT_FIND_OR_ENHKEY_USAGE_FLAG
CERT_FIND_VALID_CTL_USAGE_FLAG = CERT_FIND_VALID_ENHKEY_USAGE_FLAG
CERT_SET_PROPERTY_IGNORE_PERSIST_ERROR_FLAG = (-2147483648)
CERT_SET_PROPERTY_INHIBIT_PERSIST_FLAG = 0x40000000
CTL_ENTRY_FROM_PROP_CHAIN_FLAG = 0x1
CRL_FIND_ANY = 0
CRL_FIND_ISSUED_BY = 1
CRL_FIND_EXISTING = 2
CRL_FIND_ISSUED_FOR = 3
CRL_FIND_ISSUED_BY_AKI_FLAG = 0x1
CRL_FIND_ISSUED_BY_SIGNATURE_FLAG = 0x2
CRL_FIND_ISSUED_BY_DELTA_FLAG = 0x4
CRL_FIND_ISSUED_BY_BASE_FLAG = 0x8
CERT_STORE_ADD_NEW = 1
CERT_STORE_ADD_USE_EXISTING = 2
CERT_STORE_ADD_REPLACE_EXISTING = 3
CERT_STORE_ADD_ALWAYS = 4
CERT_STORE_ADD_REPLACE_EXISTING_INHERIT_PROPERTIES = 5
CERT_STORE_ADD_NEWER = 6
CERT_STORE_ADD_NEWER_INHERIT_PROPERTIES = 7
CERT_STORE_CERTIFICATE_CONTEXT = 1
CERT_STORE_CRL_CONTEXT = 2
CERT_STORE_CTL_CONTEXT = 3
CERT_STORE_ALL_CONTEXT_FLAG = -1
CERT_STORE_CERTIFICATE_CONTEXT_FLAG = \
(1 << CERT_STORE_CERTIFICATE_CONTEXT)
CERT_STORE_CRL_CONTEXT_FLAG = \
(1 << CERT_STORE_CRL_CONTEXT)
CERT_STORE_CTL_CONTEXT_FLAG = \
(1 << CERT_STORE_CTL_CONTEXT)
CTL_ANY_SUBJECT_TYPE = 1
CTL_CERT_SUBJECT_TYPE = 2
CTL_FIND_ANY = 0
CTL_FIND_SHA1_HASH = 1
CTL_FIND_MD5_HASH = 2
CTL_FIND_USAGE = 3
CTL_FIND_SUBJECT = 4
CTL_FIND_EXISTING = 5
CTL_FIND_NO_LIST_ID_CBDATA = (-1)
CTL_FIND_SAME_USAGE_FLAG = 0x1
CERT_STORE_CTRL_RESYNC = 1
CERT_STORE_CTRL_NOTIFY_CHANGE = 2
CERT_STORE_CTRL_COMMIT = 3
CERT_STORE_CTRL_AUTO_RESYNC = 4
CERT_STORE_CTRL_CANCEL_NOTIFY = 5
CERT_STORE_CTRL_INHIBIT_DUPLICATE_HANDLE_FLAG = 0x1
CERT_STORE_CTRL_COMMIT_FORCE_FLAG = 0x1
CERT_STORE_CTRL_COMMIT_CLEAR_FLAG = 0x2
CERT_STORE_LOCALIZED_NAME_PROP_ID = 0x1000
CERT_CREATE_CONTEXT_NOCOPY_FLAG = 0x1
CERT_CREATE_CONTEXT_SORTED_FLAG = 0x2
CERT_CREATE_CONTEXT_NO_HCRYPTMSG_FLAG = 0x4
CERT_CREATE_CONTEXT_NO_ENTRY_FLAG = 0x8
CERT_PHYSICAL_STORE_ADD_ENABLE_FLAG = 0x1
CERT_PHYSICAL_STORE_OPEN_DISABLE_FLAG = 0x2
CERT_PHYSICAL_STORE_REMOTE_OPEN_DISABLE_FLAG = 0x4
CERT_PHYSICAL_STORE_INSERT_COMPUTER_NAME_ENABLE_FLAG = 0x8
CERT_PHYSICAL_STORE_PREDEFINED_ENUM_FLAG = 0x1
# Names of physical cert stores
CERT_PHYSICAL_STORE_DEFAULT_NAME = u".Default"
CERT_PHYSICAL_STORE_GROUP_POLICY_NAME = u".GroupPolicy"
CERT_PHYSICAL_STORE_LOCAL_MACHINE_NAME = u".LocalMachine"
CERT_PHYSICAL_STORE_DS_USER_CERTIFICATE_NAME = u".UserCertificate"
CERT_PHYSICAL_STORE_LOCAL_MACHINE_GROUP_POLICY_NAME = u".LocalMachineGroupPolicy"
CERT_PHYSICAL_STORE_ENTERPRISE_NAME = u".Enterprise"
CERT_PHYSICAL_STORE_AUTH_ROOT_NAME = u".AuthRoot"
CERT_PHYSICAL_STORE_SMART_CARD_NAME = u".SmartCard"
CRYPT_OID_OPEN_SYSTEM_STORE_PROV_FUNC = "CertDllOpenSystemStoreProv"
CRYPT_OID_REGISTER_SYSTEM_STORE_FUNC = "CertDllRegisterSystemStore"
CRYPT_OID_UNREGISTER_SYSTEM_STORE_FUNC = "CertDllUnregisterSystemStore"
CRYPT_OID_ENUM_SYSTEM_STORE_FUNC = "CertDllEnumSystemStore"
CRYPT_OID_REGISTER_PHYSICAL_STORE_FUNC = "CertDllRegisterPhysicalStore"
CRYPT_OID_UNREGISTER_PHYSICAL_STORE_FUNC = "CertDllUnregisterPhysicalStore"
CRYPT_OID_ENUM_PHYSICAL_STORE_FUNC = "CertDllEnumPhysicalStore"
CRYPT_OID_SYSTEM_STORE_LOCATION_VALUE_NAME = u"SystemStoreLocation"
CMSG_TRUSTED_SIGNER_FLAG = 0x1
CMSG_SIGNER_ONLY_FLAG = 0x2
CMSG_USE_SIGNER_INDEX_FLAG = 0x4
CMSG_CMS_ENCAPSULATED_CTL_FLAG = 0x00008000
CMSG_ENCODE_SORTED_CTL_FLAG = 0x1
CMSG_ENCODE_HASHED_SUBJECT_IDENTIFIER_FLAG = 0x2
CERT_VERIFY_INHIBIT_CTL_UPDATE_FLAG = 0x1
CERT_VERIFY_TRUSTED_SIGNERS_FLAG = 0x2
CERT_VERIFY_NO_TIME_CHECK_FLAG = 0x4
CERT_VERIFY_ALLOW_MORE_USAGE_FLAG = 0x8
CERT_VERIFY_UPDATED_CTL_FLAG = 0x1
CERT_CONTEXT_REVOCATION_TYPE = 1
CERT_VERIFY_REV_CHAIN_FLAG = 0x00000001
CERT_VERIFY_CACHE_ONLY_BASED_REVOCATION = 0x00000002
CERT_VERIFY_REV_ACCUMULATIVE_TIMEOUT_FLAG = 0x00000004
CERT_UNICODE_IS_RDN_ATTRS_FLAG = 0x1
CERT_CASE_INSENSITIVE_IS_RDN_ATTRS_FLAG = 0x2
CRYPT_VERIFY_CERT_SIGN_SUBJECT_BLOB = 1
CRYPT_VERIFY_CERT_SIGN_SUBJECT_CERT = 2
CRYPT_VERIFY_CERT_SIGN_SUBJECT_CRL = 3
CRYPT_VERIFY_CERT_SIGN_ISSUER_PUBKEY = 1
CRYPT_VERIFY_CERT_SIGN_ISSUER_CERT = 2
CRYPT_VERIFY_CERT_SIGN_ISSUER_CHAIN = 3
CRYPT_VERIFY_CERT_SIGN_ISSUER_NULL = 4
CRYPT_DEFAULT_CONTEXT_AUTO_RELEASE_FLAG = 0x00000001
CRYPT_DEFAULT_CONTEXT_PROCESS_FLAG = 0x00000002
CRYPT_DEFAULT_CONTEXT_CERT_SIGN_OID = 1
CRYPT_DEFAULT_CONTEXT_MULTI_CERT_SIGN_OID = 2
CRYPT_OID_EXPORT_PUBLIC_KEY_INFO_FUNC = "CryptDllExportPublicKeyInfoEx"
CRYPT_OID_IMPORT_PUBLIC_KEY_INFO_FUNC = "CryptDllImportPublicKeyInfoEx"
CRYPT_ACQUIRE_CACHE_FLAG = 0x00000001
CRYPT_ACQUIRE_USE_PROV_INFO_FLAG = 0x00000002
CRYPT_ACQUIRE_COMPARE_KEY_FLAG = 0x00000004
CRYPT_ACQUIRE_SILENT_FLAG = 0x00000040
CRYPT_FIND_USER_KEYSET_FLAG = 0x00000001
CRYPT_FIND_MACHINE_KEYSET_FLAG = 0x00000002
CRYPT_FIND_SILENT_KEYSET_FLAG = 0x00000040
CRYPT_OID_IMPORT_PRIVATE_KEY_INFO_FUNC = "CryptDllImportPrivateKeyInfoEx"
CRYPT_OID_EXPORT_PRIVATE_KEY_INFO_FUNC = "CryptDllExportPrivateKeyInfoEx"
CRYPT_DELETE_KEYSET = CRYPT_DELETEKEYSET
CERT_SIMPLE_NAME_STR = 1
CERT_OID_NAME_STR = 2
CERT_X500_NAME_STR = 3
CERT_NAME_STR_SEMICOLON_FLAG = 0x40000000
CERT_NAME_STR_NO_PLUS_FLAG = 0x20000000
CERT_NAME_STR_NO_QUOTING_FLAG = 0x10000000
CERT_NAME_STR_CRLF_FLAG = 0x08000000
CERT_NAME_STR_COMMA_FLAG = 0x04000000
CERT_NAME_STR_REVERSE_FLAG = 0x02000000
CERT_NAME_STR_DISABLE_IE4_UTF8_FLAG = 0x00010000
CERT_NAME_STR_ENABLE_T61_UNICODE_FLAG = 0x00020000
CERT_NAME_STR_ENABLE_UTF8_UNICODE_FLAG = 0x00040000
CERT_NAME_EMAIL_TYPE = 1
CERT_NAME_RDN_TYPE = 2
CERT_NAME_ATTR_TYPE = 3
CERT_NAME_SIMPLE_DISPLAY_TYPE = 4
CERT_NAME_FRIENDLY_DISPLAY_TYPE = 5
CERT_NAME_DNS_TYPE = 6
CERT_NAME_URL_TYPE = 7
CERT_NAME_UPN_TYPE = 8
CERT_NAME_ISSUER_FLAG = 0x1
CERT_NAME_DISABLE_IE4_UTF8_FLAG = 0x00010000
CRYPT_MESSAGE_BARE_CONTENT_OUT_FLAG = 0x00000001
CRYPT_MESSAGE_ENCAPSULATED_CONTENT_OUT_FLAG = 0x00000002
CRYPT_MESSAGE_KEYID_SIGNER_FLAG = 0x00000004
CRYPT_MESSAGE_SILENT_KEYSET_FLAG = 0x00000040
CRYPT_MESSAGE_KEYID_RECIPIENT_FLAG = 0x4
CERT_QUERY_OBJECT_FILE = 0x00000001
CERT_QUERY_OBJECT_BLOB = 0x00000002
CERT_QUERY_CONTENT_CERT = 1
CERT_QUERY_CONTENT_CTL = 2
CERT_QUERY_CONTENT_CRL = 3
CERT_QUERY_CONTENT_SERIALIZED_STORE = 4
CERT_QUERY_CONTENT_SERIALIZED_CERT = 5
CERT_QUERY_CONTENT_SERIALIZED_CTL = 6
CERT_QUERY_CONTENT_SERIALIZED_CRL = 7
CERT_QUERY_CONTENT_PKCS7_SIGNED = 8
CERT_QUERY_CONTENT_PKCS7_UNSIGNED = 9
CERT_QUERY_CONTENT_PKCS7_SIGNED_EMBED = 10
CERT_QUERY_CONTENT_PKCS10 = 11
CERT_QUERY_CONTENT_PFX = 12
CERT_QUERY_CONTENT_CERT_PAIR = 13
CERT_QUERY_CONTENT_FLAG_CERT = \
( 1 << CERT_QUERY_CONTENT_CERT)
CERT_QUERY_CONTENT_FLAG_CTL = \
( 1 << CERT_QUERY_CONTENT_CTL)
CERT_QUERY_CONTENT_FLAG_CRL = \
( 1 << CERT_QUERY_CONTENT_CRL)
CERT_QUERY_CONTENT_FLAG_SERIALIZED_STORE = \
( 1 << CERT_QUERY_CONTENT_SERIALIZED_STORE)
CERT_QUERY_CONTENT_FLAG_SERIALIZED_CERT = \
( 1 << CERT_QUERY_CONTENT_SERIALIZED_CERT)
CERT_QUERY_CONTENT_FLAG_SERIALIZED_CTL = \
( 1 << CERT_QUERY_CONTENT_SERIALIZED_CTL)
CERT_QUERY_CONTENT_FLAG_SERIALIZED_CRL = \
( 1 << CERT_QUERY_CONTENT_SERIALIZED_CRL)
CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED = \
( 1 << CERT_QUERY_CONTENT_PKCS7_SIGNED)
CERT_QUERY_CONTENT_FLAG_PKCS7_UNSIGNED = \
( 1 << CERT_QUERY_CONTENT_PKCS7_UNSIGNED)
CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED_EMBED = \
( 1 << CERT_QUERY_CONTENT_PKCS7_SIGNED_EMBED)
CERT_QUERY_CONTENT_FLAG_PKCS10 = \
( 1 << CERT_QUERY_CONTENT_PKCS10)
CERT_QUERY_CONTENT_FLAG_PFX = \
( 1 << CERT_QUERY_CONTENT_PFX)
CERT_QUERY_CONTENT_FLAG_CERT_PAIR = \
( 1 << CERT_QUERY_CONTENT_CERT_PAIR)
CERT_QUERY_CONTENT_FLAG_ALL = \
CERT_QUERY_CONTENT_FLAG_CERT | \
CERT_QUERY_CONTENT_FLAG_CTL | \
CERT_QUERY_CONTENT_FLAG_CRL | \
CERT_QUERY_CONTENT_FLAG_SERIALIZED_STORE | \
CERT_QUERY_CONTENT_FLAG_SERIALIZED_CERT | \
CERT_QUERY_CONTENT_FLAG_SERIALIZED_CTL | \
CERT_QUERY_CONTENT_FLAG_SERIALIZED_CRL | \
CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED | \
CERT_QUERY_CONTENT_FLAG_PKCS7_UNSIGNED | \
CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED_EMBED | \
CERT_QUERY_CONTENT_FLAG_PKCS10 | \
CERT_QUERY_CONTENT_FLAG_PFX | \
CERT_QUERY_CONTENT_FLAG_CERT_PAIR
CERT_QUERY_FORMAT_BINARY = 1
CERT_QUERY_FORMAT_BASE64_ENCODED = 2
CERT_QUERY_FORMAT_ASN_ASCII_HEX_ENCODED = 3
CERT_QUERY_FORMAT_FLAG_BINARY = \
( 1 << CERT_QUERY_FORMAT_BINARY)
CERT_QUERY_FORMAT_FLAG_BASE64_ENCODED = \
( 1 << CERT_QUERY_FORMAT_BASE64_ENCODED)
CERT_QUERY_FORMAT_FLAG_ASN_ASCII_HEX_ENCODED = \
( 1 << CERT_QUERY_FORMAT_ASN_ASCII_HEX_ENCODED)
CERT_QUERY_FORMAT_FLAG_ALL = \
CERT_QUERY_FORMAT_FLAG_BINARY | \
CERT_QUERY_FORMAT_FLAG_BASE64_ENCODED | \
CERT_QUERY_FORMAT_FLAG_ASN_ASCII_HEX_ENCODED
CREDENTIAL_OID_PASSWORD_CREDENTIALS_A = 1
CREDENTIAL_OID_PASSWORD_CREDENTIALS_W = 2
CREDENTIAL_OID_PASSWORD_CREDENTIALS = CREDENTIAL_OID_PASSWORD_CREDENTIALS_W
SCHEME_OID_RETRIEVE_ENCODED_OBJECT_FUNC = "SchemeDllRetrieveEncodedObject"
SCHEME_OID_RETRIEVE_ENCODED_OBJECTW_FUNC = "SchemeDllRetrieveEncodedObjectW"
CONTEXT_OID_CREATE_OBJECT_CONTEXT_FUNC = "ContextDllCreateObjectContext"
CONTEXT_OID_CERTIFICATE = 1
CONTEXT_OID_CRL = 2
CONTEXT_OID_CTL = 3
CONTEXT_OID_PKCS7 = 4
CONTEXT_OID_CAPI2_ANY = 5
CONTEXT_OID_OCSP_RESP = 6
CRYPT_RETRIEVE_MULTIPLE_OBJECTS = 0x00000001
CRYPT_CACHE_ONLY_RETRIEVAL = 0x00000002
CRYPT_WIRE_ONLY_RETRIEVAL = 0x00000004
CRYPT_DONT_CACHE_RESULT = 0x00000008
CRYPT_ASYNC_RETRIEVAL = 0x00000010
CRYPT_STICKY_CACHE_RETRIEVAL = 0x00001000
CRYPT_LDAP_SCOPE_BASE_ONLY_RETRIEVAL = 0x00002000
CRYPT_OFFLINE_CHECK_RETRIEVAL = 0x00004000
CRYPT_LDAP_INSERT_ENTRY_ATTRIBUTE = 0x00008000
CRYPT_LDAP_SIGN_RETRIEVAL = 0x00010000
CRYPT_NO_AUTH_RETRIEVAL = 0x00020000
CRYPT_LDAP_AREC_EXCLUSIVE_RETRIEVAL = 0x00040000
CRYPT_AIA_RETRIEVAL = 0x00080000
CRYPT_VERIFY_CONTEXT_SIGNATURE = 0x00000020
CRYPT_VERIFY_DATA_HASH = 0x00000040
CRYPT_KEEP_TIME_VALID = 0x00000080
CRYPT_DONT_VERIFY_SIGNATURE = 0x00000100
CRYPT_DONT_CHECK_TIME_VALIDITY = 0x00000200
CRYPT_CHECK_FRESHNESS_TIME_VALIDITY = 0x00000400
CRYPT_ACCUMULATIVE_TIMEOUT = 0x00000800
CRYPT_PARAM_ASYNC_RETRIEVAL_COMPLETION = 1
CRYPT_PARAM_CANCEL_ASYNC_RETRIEVAL = 2
CRYPT_GET_URL_FROM_PROPERTY = 0x00000001
CRYPT_GET_URL_FROM_EXTENSION = 0x00000002
CRYPT_GET_URL_FROM_UNAUTH_ATTRIBUTE = 0x00000004
CRYPT_GET_URL_FROM_AUTH_ATTRIBUTE = 0x00000008
URL_OID_GET_OBJECT_URL_FUNC = "UrlDllGetObjectUrl"
TIME_VALID_OID_GET_OBJECT_FUNC = "TimeValidDllGetObject"
TIME_VALID_OID_FLUSH_OBJECT_FUNC = "TimeValidDllFlushObject"
TIME_VALID_OID_GET_CTL = 1
TIME_VALID_OID_GET_CRL = 2
TIME_VALID_OID_GET_CRL_FROM_CERT = 3
TIME_VALID_OID_GET_FRESHEST_CRL_FROM_CERT = 4
TIME_VALID_OID_GET_FRESHEST_CRL_FROM_CRL = 5
TIME_VALID_OID_FLUSH_CTL = 1
TIME_VALID_OID_FLUSH_CRL = 2
TIME_VALID_OID_FLUSH_CRL_FROM_CERT = 3
TIME_VALID_OID_FLUSH_FRESHEST_CRL_FROM_CERT = 4
TIME_VALID_OID_FLUSH_FRESHEST_CRL_FROM_CRL = 5
CRYPTPROTECT_PROMPT_ON_UNPROTECT = 0x1
CRYPTPROTECT_PROMPT_ON_PROTECT = 0x2
CRYPTPROTECT_PROMPT_RESERVED = 0x04
CRYPTPROTECT_PROMPT_STRONG = 0x08
CRYPTPROTECT_PROMPT_REQUIRE_STRONG = 0x10
CRYPTPROTECT_UI_FORBIDDEN = 0x1
CRYPTPROTECT_LOCAL_MACHINE = 0x4
CRYPTPROTECT_CRED_SYNC = 0x8
CRYPTPROTECT_AUDIT = 0x10
CRYPTPROTECT_NO_RECOVERY = 0x20
CRYPTPROTECT_VERIFY_PROTECTION = 0x40
CRYPTPROTECT_CRED_REGENERATE = 0x80
CRYPTPROTECT_FIRST_RESERVED_FLAGVAL = 0x0FFFFFFF
CRYPTPROTECT_LAST_RESERVED_FLAGVAL = (-1)
CRYPTPROTECTMEMORY_BLOCK_SIZE = 16
CRYPTPROTECTMEMORY_SAME_PROCESS = 0x00
CRYPTPROTECTMEMORY_CROSS_PROCESS = 0x01
CRYPTPROTECTMEMORY_SAME_LOGON = 0x02
CERT_CREATE_SELFSIGN_NO_SIGN = 1
CERT_CREATE_SELFSIGN_NO_KEY_INFO = 2
CRYPT_KEYID_MACHINE_FLAG = 0x00000020
CRYPT_KEYID_ALLOC_FLAG = 0x00008000
CRYPT_KEYID_DELETE_FLAG = 0x00000010
CRYPT_KEYID_SET_NEW_FLAG = 0x00002000
CERT_CHAIN_MAX_AIA_URL_COUNT_IN_CERT_DEFAULT = 5
CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_COUNT_PER_CHAIN_DEFAULT = 10
CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_BYTE_COUNT_DEFAULT = 100000
CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_CERT_COUNT_DEFAULT = 10
CERT_CHAIN_CACHE_END_CERT = 0x00000001
CERT_CHAIN_THREAD_STORE_SYNC = 0x00000002
CERT_CHAIN_CACHE_ONLY_URL_RETRIEVAL = 0x00000004
CERT_CHAIN_USE_LOCAL_MACHINE_STORE = 0x00000008
CERT_CHAIN_ENABLE_CACHE_AUTO_UPDATE = 0x00000010
CERT_CHAIN_ENABLE_SHARE_STORE = 0x00000020
CERT_TRUST_NO_ERROR = 0x00000000
CERT_TRUST_IS_NOT_TIME_VALID = 0x00000001
CERT_TRUST_IS_NOT_TIME_NESTED = 0x00000002
CERT_TRUST_IS_REVOKED = 0x00000004
CERT_TRUST_IS_NOT_SIGNATURE_VALID = 0x00000008
CERT_TRUST_IS_NOT_VALID_FOR_USAGE = 0x00000010
CERT_TRUST_IS_UNTRUSTED_ROOT = 0x00000020
CERT_TRUST_REVOCATION_STATUS_UNKNOWN = 0x00000040
CERT_TRUST_IS_CYCLIC = 0x00000080
CERT_TRUST_INVALID_EXTENSION = 0x00000100
CERT_TRUST_INVALID_POLICY_CONSTRAINTS = 0x00000200
CERT_TRUST_INVALID_BASIC_CONSTRAINTS = 0x00000400
CERT_TRUST_INVALID_NAME_CONSTRAINTS = 0x00000800
CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT = 0x00001000
CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT = 0x00002000
CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT = 0x00004000
CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT = 0x00008000
CERT_TRUST_IS_OFFLINE_REVOCATION = 0x01000000
CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY = 0x02000000
CERT_TRUST_IS_PARTIAL_CHAIN = 0x00010000
CERT_TRUST_CTL_IS_NOT_TIME_VALID = 0x00020000
CERT_TRUST_CTL_IS_NOT_SIGNATURE_VALID = 0x00040000
CERT_TRUST_CTL_IS_NOT_VALID_FOR_USAGE = 0x00080000
CERT_TRUST_HAS_EXACT_MATCH_ISSUER = 0x00000001
CERT_TRUST_HAS_KEY_MATCH_ISSUER = 0x00000002
CERT_TRUST_HAS_NAME_MATCH_ISSUER = 0x00000004
CERT_TRUST_IS_SELF_SIGNED = 0x00000008
CERT_TRUST_HAS_PREFERRED_ISSUER = 0x00000100
CERT_TRUST_HAS_ISSUANCE_CHAIN_POLICY = 0x00000200
CERT_TRUST_HAS_VALID_NAME_CONSTRAINTS = 0x00000400
CERT_TRUST_IS_COMPLEX_CHAIN = 0x00010000
USAGE_MATCH_TYPE_AND = 0x00000000
USAGE_MATCH_TYPE_OR = 0x00000001
CERT_CHAIN_REVOCATION_CHECK_END_CERT = 0x10000000
CERT_CHAIN_REVOCATION_CHECK_CHAIN = 0x20000000
CERT_CHAIN_REVOCATION_CHECK_CHAIN_EXCLUDE_ROOT = 0x40000000
CERT_CHAIN_REVOCATION_CHECK_CACHE_ONLY = (-2147483648)
CERT_CHAIN_REVOCATION_ACCUMULATIVE_TIMEOUT = 0x08000000
CERT_CHAIN_DISABLE_PASS1_QUALITY_FILTERING = 0x00000040
CERT_CHAIN_RETURN_LOWER_QUALITY_CONTEXTS = 0x00000080
CERT_CHAIN_DISABLE_AUTH_ROOT_AUTO_UPDATE = 0x00000100
CERT_CHAIN_TIMESTAMP_TIME = 0x00000200
REVOCATION_OID_CRL_REVOCATION = 1
CERT_CHAIN_FIND_BY_ISSUER = 1
CERT_CHAIN_FIND_BY_ISSUER_COMPARE_KEY_FLAG = 0x0001
CERT_CHAIN_FIND_BY_ISSUER_COMPLEX_CHAIN_FLAG = 0x0002
CERT_CHAIN_FIND_BY_ISSUER_CACHE_ONLY_URL_FLAG = 0x0004
CERT_CHAIN_FIND_BY_ISSUER_LOCAL_MACHINE_FLAG = 0x0008
CERT_CHAIN_FIND_BY_ISSUER_NO_KEY_FLAG = 0x4000
CERT_CHAIN_FIND_BY_ISSUER_CACHE_ONLY_FLAG = 0x8000
CERT_CHAIN_POLICY_IGNORE_NOT_TIME_VALID_FLAG = 0x00000001
CERT_CHAIN_POLICY_IGNORE_CTL_NOT_TIME_VALID_FLAG = 0x00000002
CERT_CHAIN_POLICY_IGNORE_NOT_TIME_NESTED_FLAG = 0x00000004
CERT_CHAIN_POLICY_IGNORE_INVALID_BASIC_CONSTRAINTS_FLAG = 0x00000008
CERT_CHAIN_POLICY_IGNORE_ALL_NOT_TIME_VALID_FLAGS = ( \
CERT_CHAIN_POLICY_IGNORE_NOT_TIME_VALID_FLAG | \
CERT_CHAIN_POLICY_IGNORE_CTL_NOT_TIME_VALID_FLAG | \
CERT_CHAIN_POLICY_IGNORE_NOT_TIME_NESTED_FLAG \
)
CERT_CHAIN_POLICY_ALLOW_UNKNOWN_CA_FLAG = 0x00000010
CERT_CHAIN_POLICY_IGNORE_WRONG_USAGE_FLAG = 0x00000020
CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG = 0x00000040
CERT_CHAIN_POLICY_IGNORE_INVALID_POLICY_FLAG = 0x00000080
CERT_CHAIN_POLICY_IGNORE_END_REV_UNKNOWN_FLAG = 0x00000100
CERT_CHAIN_POLICY_IGNORE_CTL_SIGNER_REV_UNKNOWN_FLAG = 0x00000200
CERT_CHAIN_POLICY_IGNORE_CA_REV_UNKNOWN_FLAG = 0x00000400
CERT_CHAIN_POLICY_IGNORE_ROOT_REV_UNKNOWN_FLAG = 0x00000800
CERT_CHAIN_POLICY_IGNORE_ALL_REV_UNKNOWN_FLAGS = ( \
CERT_CHAIN_POLICY_IGNORE_END_REV_UNKNOWN_FLAG | \
CERT_CHAIN_POLICY_IGNORE_CTL_SIGNER_REV_UNKNOWN_FLAG | \
CERT_CHAIN_POLICY_IGNORE_CA_REV_UNKNOWN_FLAG | \
CERT_CHAIN_POLICY_IGNORE_ROOT_REV_UNKNOWN_FLAG \
)
CERT_CHAIN_POLICY_ALLOW_TESTROOT_FLAG = 0x00008000
CERT_CHAIN_POLICY_TRUST_TESTROOT_FLAG = 0x00004000
CRYPT_OID_VERIFY_CERTIFICATE_CHAIN_POLICY_FUNC = \
"CertDllVerifyCertificateChainPolicy"
AUTHTYPE_CLIENT = 1
AUTHTYPE_SERVER = 2
BASIC_CONSTRAINTS_CERT_CHAIN_POLICY_CA_FLAG = (-2147483648)
BASIC_CONSTRAINTS_CERT_CHAIN_POLICY_END_ENTITY_FLAG = 0x40000000
MICROSOFT_ROOT_CERT_CHAIN_POLICY_ENABLE_TEST_ROOT_FLAG = 0x00010000
CRYPT_STRING_BASE64HEADER = 0x00000000
CRYPT_STRING_BASE64 = 0x00000001
CRYPT_STRING_BINARY = 0x00000002
CRYPT_STRING_BASE64REQUESTHEADER = 0x00000003
CRYPT_STRING_HEX = 0x00000004
CRYPT_STRING_HEXASCII = 0x00000005
CRYPT_STRING_BASE64_ANY = 0x00000006
CRYPT_STRING_ANY = 0x00000007
CRYPT_STRING_HEX_ANY = 0x00000008
CRYPT_STRING_BASE64X509CRLHEADER = 0x00000009
CRYPT_STRING_HEXADDR = 0x0000000a
CRYPT_STRING_HEXASCIIADDR = 0x0000000b
CRYPT_STRING_NOCR = (-2147483648)
CRYPT_USER_KEYSET = 0x00001000
PKCS12_IMPORT_RESERVED_MASK = (-65536)
REPORT_NO_PRIVATE_KEY = 0x0001
REPORT_NOT_ABLE_TO_EXPORT_PRIVATE_KEY = 0x0002
EXPORT_PRIVATE_KEYS = 0x0004
PKCS12_EXPORT_RESERVED_MASK = (-65536)
# Certificate store provider types used with CertOpenStore
CERT_STORE_PROV_MSG = 1
CERT_STORE_PROV_MEMORY = 2
CERT_STORE_PROV_FILE = 3
CERT_STORE_PROV_REG = 4
CERT_STORE_PROV_PKCS7 = 5
CERT_STORE_PROV_SERIALIZED = 6
CERT_STORE_PROV_FILENAME = 8
CERT_STORE_PROV_SYSTEM = 10
CERT_STORE_PROV_COLLECTION = 11
CERT_STORE_PROV_SYSTEM_REGISTRY = 13
CERT_STORE_PROV_PHYSICAL = 14
CERT_STORE_PROV_SMART_CARD = 15
CERT_STORE_PROV_LDAP = 16
URL_OID_CERTIFICATE_ISSUER = 1
URL_OID_CERTIFICATE_CRL_DIST_POINT = 2
URL_OID_CTL_ISSUER = 3
URL_OID_CTL_NEXT_UPDATE = 4
URL_OID_CRL_ISSUER = 5
URL_OID_CERTIFICATE_FRESHEST_CRL = 6
URL_OID_CRL_FRESHEST_CRL = 7
URL_OID_CROSS_CERT_DIST_POINT = 8
URL_OID_CERTIFICATE_OCSP = 9
URL_OID_CERTIFICATE_OCSP_AND_CRL_DIST_POINT = 10
URL_OID_CERTIFICATE_CRL_DIST_POINT_AND_OCSP = 11
URL_OID_CROSS_CERT_SUBJECT_INFO_ACCESS = 12
URL_OID_CERTIFICATE_ONLY_OCSP = 13
| bsd-3-clause | 4,232,275,969,361,458,000 | 37.86 | 123 | 0.716006 | false |
MTgeophysics/mtpy | mtpy/gui/mt_file_editor.py | 1 | 44431 | # -*- coding: utf-8 -*-
"""
Created on Wed May 03 19:01:42 2017
@author: jrpeacock
"""
import sys
from PyQt5 import QtWidgets, QtGui, QtCore
import mtpy.core.mt as mt
# header label font
label_font = QtGui.QFont()
label_font.setBold(True)
label_font.setPointSize(13)
class MTMainWindow(QtWidgets.QMainWindow):
def __init__(self):
super(MTMainWindow, self).__init__()
self.setWindowTitle('MT File Editor')
self.mt_obj = mt.MT()
self.setup_ui()
def setup_ui(self):
"""
setup user interface
"""
screen_size = QtWidgets.QDesktopWidget().availableGeometry()
width = screen_size.width()
self.menu_file = self.menuBar().addMenu("&File")
self.action_open_file = self.menu_file.addAction("&Open")
self.action_open_file.triggered.connect(self.get_mt_file)
self.action_save_file = self.menu_file.addAction("&Save")
self.action_save_file.triggered.connect(self.save_mt_file)
self.setWindowState(QtCore.Qt.WindowMaximized)
self.central_widget = QtWidgets.QWidget()
self.setCentralWidget(self.central_widget)
        self.tab_widget = MTTabWidget(self.mt_obj, self)
self.text_label = QtWidgets.QLabel("File Preview")
self.text_label.setFont(label_font)
self.text_edit = QtWidgets.QTextEdit()
self.text_edit.setMaximumWidth(int(width/2.0))
text_layout = QtWidgets.QVBoxLayout()
text_layout.addWidget(self.text_label)
text_layout.addWidget(self.text_edit)
layout = QtWidgets.QHBoxLayout()
layout.addWidget(self.tab_widget)
layout.addLayout(text_layout)
self.central_widget.setLayout(layout)
#self.centeral_widget = self.setCentralWidget(self.tab_widget)
def get_mt_file(self):
fn_dialog = QtWidgets.QFileDialog()
        fn = str(fn_dialog.getOpenFileName(caption='Choose MT file',
                                           filter='*.edi;*.xml;*.j')[0])
self.mt_obj = mt.MT()
self.mt_obj.read_mt_file(fn)
self.tab_widget.mt_obj = self.mt_obj
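    # The File->Save action above is wired to save_mt_file, which the class
    # body never defined.  The slot below is a minimal sketch: it assumes
    # mt.MT.write_mt_file() accepts save_dir/fn_basename keywords as in
    # recent mtpy releases; adjust the call if the installed API differs.
    def save_mt_file(self):
        import os  # local import keeps this sketch self-contained
        fn_dialog = QtWidgets.QFileDialog()
        save_fn = str(fn_dialog.getSaveFileName(caption='Save MT file',
                                                filter='*.edi')[0])
        if not save_fn:
            return
        self.mt_obj.write_mt_file(save_dir=os.path.dirname(save_fn),
                                  fn_basename=os.path.basename(save_fn))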
class MTTabWidget(QtWidgets.QTabWidget):
def __init__(self, mt_obj, parent=None):
super(MTTabWidget, self).__init__(parent)
self.mt_obj = mt_obj
self.setup_ui()
def setup_ui(self):
self.tab_site = SiteTab(self)
self.tab_field = FieldNotesTab(self)
self.tab_processing = ProcessingTab(self)
self.tab_provenance = ProvenanceTab(self)
self.tab_copyright = CopyrightTab(self)
self.tab_data = DataTab(self)
self.addTab(self.tab_site, "Site")
self.addTab(self.tab_field, "Field Notes")
self.addTab(self.tab_processing, "Processing")
self.addTab(self.tab_provenance, "Provenance")
self.addTab(self.tab_copyright, "Copyright")
self.addTab(self.tab_data, "Data")
#==============================================================================
# Site tab
#==============================================================================
class SiteTab(QtWidgets.QWidget):
def __init__(self, parent=None):
super(SiteTab, self).__init__(parent)
self.Site = mt.Site()
self.setup_ui()
def setup_ui(self):
self.acq_by_label = QtWidgets.QLabel('Acquired By')
self.acq_by_label.setFont(label_font)
self.acq_by_edit = QtWidgets.QLineEdit()
self.acq_by_edit.setText(self.Site.acquired_by)
self.acq_by_edit.editingFinished.connect(self.set_acq_by)
self.id_label = QtWidgets.QLabel('Station ID')
self.id_label.setFont(label_font)
self.id_edit = QtWidgets.QLineEdit()
self.id_edit.setText(self.Site.id)
self.id_edit.editingFinished.connect(self.set_id)
self.lat_label = QtWidgets.QLabel('Station Latitude (decimal or HH:MM:SS.ms)')
self.lat_label.setFont(label_font)
self.lat_edit = QtWidgets.QLineEdit()
        self.lat_edit.setText('{0}'.format(self.Site.Location.latitude))
self.lat_edit.editingFinished.connect(self.set_lat)
self.lon_label = QtWidgets.QLabel('Station Longitude (decimal or HH:MM:SS.ms)')
self.lon_label.setFont(label_font)
self.lon_edit = QtWidgets.QLineEdit()
        self.lon_edit.setText('{0}'.format(self.Site.Location.longitude))
self.lon_edit.editingFinished.connect(self.set_lon)
self.datum_label = QtWidgets.QLabel('Geographic Datum')
self.datum_label.setFont(label_font)
self.datum_combo = QtWidgets.QComboBox()
self.datum_combo.addItems(['WGS84',
'NAD83',
'AGD84',
'GRS80',
'NAD27',
'ED50',
'JGD2011',
'KGD97',
'GCJ02',
'BD09',
'GTRF',])
self.datum_combo.activated[str].connect(self.set_datum)
self.easting_label = QtWidgets.QLabel('Easting (m)')
self.easting_label.setFont(label_font)
        self.easting_edit = QtWidgets.QLineEdit()
        if self.Site.Location.easting is None:
            self.easting_edit.setText('{0:.3f}'.format(0.0))
        else:
            self.easting_edit.setText('{0:.3f}'.format(self.Site.Location.easting))
        self.easting_edit.editingFinished.connect(self.set_easting)
self.northing_label = QtWidgets.QLabel('Northing (m)')
self.northing_label.setFont(label_font)
        self.northing_edit = QtWidgets.QLineEdit()
        if self.Site.Location.northing is None:
            self.northing_edit.setText('{0:.3f}'.format(0.0))
        else:
            self.northing_edit.setText('{0:.3f}'.format(self.Site.Location.northing))
        self.northing_edit.editingFinished.connect(self.set_northing)
self.utm_label = QtWidgets.QLabel('UTM Zone')
self.utm_label.setFont(label_font)
        self.utm_edit = QtWidgets.QLineEdit()
        if self.Site.Location.utm_zone is None:
            self.utm_edit.setText('00A')
        else:
            self.utm_edit.setText(self.Site.Location.utm_zone)
        self.utm_edit.editingFinished.connect(self.set_utm)
self.elev_label = QtWidgets.QLabel('Station Elevation')
self.elev_label.setFont(label_font)
self.elev_edit = QtWidgets.QLineEdit()
        self.elev_edit.setText('{0}'.format(self.Site.Location.elevation))
self.elev_edit.editingFinished.connect(self.set_elev)
self.elev_units_label = QtWidgets.QLabel('Elevation Units')
self.elev_units_label.setFont(label_font)
self.elev_units_combo = QtWidgets.QComboBox()
self.elev_units_combo.addItems(['km', 'm', 'ft', 'miles'])
self.elev_units_combo.activated[str].connect(self.set_elev_units)
self.coord_label = QtWidgets.QLabel('Z and T Coordinate System')
self.coord_label.setFont(label_font)
self.coord_combo = QtWidgets.QComboBox()
self.coord_combo.addItems(['Geographic North', 'Geomagnetic North'])
self.coord_combo.activated[str].connect(self.set_coord)
self.dec_label = QtWidgets.QLabel('Geomagnetic Declination (deg)')
self.dec_label.setFont(label_font)
self.dec_edit = QtWidgets.QLineEdit()
        self.dec_edit.setText('{0}'.format(self.Site.Location.declination))
self.dec_edit.editingFinished.connect(self.set_dec)
self.proj_label = QtWidgets.QLabel('Project Name')
self.proj_label.setFont(label_font)
self.proj_edit = QtWidgets.QLineEdit()
self.proj_edit.setText(self.Site.project)
self.proj_edit.editingFinished.connect(self.set_proj)
self.survey_label = QtWidgets.QLabel('Survey Name')
self.survey_label.setFont(label_font)
self.survey_edit = QtWidgets.QLineEdit()
self.survey_edit.setText(self.Site.survey)
self.survey_edit.editingFinished.connect(self.set_survey)
layout = QtWidgets.QFormLayout()
layout.addRow(self.proj_label, self.proj_edit)
layout.addRow(self.survey_label, self.survey_edit)
layout.addRow(self.id_label, self.id_edit)
layout.addRow(self.lat_label, self.lat_edit)
layout.addRow(self.lon_label, self.lon_edit)
layout.addRow(self.datum_label, self.datum_combo)
layout.addRow(self.easting_label, self.easting_edit)
layout.addRow(self.northing_label, self.northing_edit)
layout.addRow(self.utm_label, self.utm_edit)
layout.addRow(self.coord_label, self.coord_combo)
layout.addRow(self.elev_label, self.elev_edit)
layout.addRow(self.elev_units_label, self.elev_units_combo)
layout.addRow(self.acq_by_label, self.acq_by_edit)
layout.addRow(self.dec_label, self.dec_edit)
self.setLayout(layout)
def set_acq_by(self):
self.Site.acquired_by = str(self.acq_by_edit.text())
self.acq_by_edit.setText(self.Site.acquired_by)
def set_id(self):
self.Site.id = str(self.id_edit.text())
self.id_edit.setText(self.Site.id)
def set_proj(self):
        self.Site.project = str(self.proj_edit.text())
self.proj_edit.setText(self.Site.project)
def set_survey(self):
self.Site.survey = str(self.survey_edit.text())
self.survey_edit.setText(self.Site.survey)
def set_lat(self):
self.Site.Location.latitude = str(self.lat_edit.text())
self.lat_edit.setText('{0:.6f}'.format(self.Site.Location.latitude))
self._set_utm()
def set_lon(self):
self.Site.Location.longitude = str(self.lon_edit.text())
self.lon_edit.setText('{0:.6f}'.format(self.Site.Location.longitude))
self._set_utm()
def set_easting(self):
self.Site.Location.easting = float(self.easting_edit.text())
self.easting_edit.setText('{0:.3f}'.format(self.Site.Location.easting))
self._set_ll()
def set_northing(self):
self.Site.Location.northing = float(self.northing_edit.text())
self.northing_edit.setText('{0:.3f}'.format(self.Site.Location.northing))
self._set_ll()
def set_utm(self):
self.Site.Location.utm_zone = str(self.utm_edit.text())
self.utm_edit.setText(self.Site.Location.utm_zone)
self._set_ll()
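    # _set_ll and _set_utm keep the two coordinate representations in sync:
    # once easting, northing and UTM zone are all available the lat/lon boxes
    # are re-projected from them, and conversely any lat/lon edit updates the
    # UTM fields.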
def _set_ll(self):
if self.Site.Location.easting is not None and \
self.Site.Location.northing is not None and \
self.Site.Location.utm_zone is not None:
self.Site.Location.project_location2ll()
            self.lat_edit.setText('{0:.6f}'.format(self.Site.Location.latitude))
            self.lon_edit.setText('{0:.6f}'.format(self.Site.Location.longitude))
else:
print(self.Site.Location.easting, self.Site.Location.northing, self.Site.Location.utm_zone)
def _set_utm(self):
if self.Site.Location.latitude is not None and \
self.Site.Location.longitude is not None:
self.Site.Location.project_location2utm()
self.easting_edit.setText('{0:.3f}'.format(self.Site.Location.easting))
self.northing_edit.setText('{0:.3f}'.format(self.Site.Location.northing))
self.utm_edit.setText(self.Site.Location.utm_zone)
def set_elev(self):
self.Site.Location.elevation = str(self.elev_edit.text())
self.elev_edit.setText('{0:.3f}'.format(self.Site.Location.elevation))
def set_elev_units(self, text):
self.Site.Location.elev_units = text
def set_datum(self, text):
self.Site.Location.datum = text
self._set_utm()
def set_dec(self):
try:
self.Site.Location.declination = float(self.dec_edit.text())
self.dec_edit.setText('{0:.3f}'.format(self.Site.Location.declination))
except ValueError:
self.Site.Location.declination = 0.0
self.dec_edit.setText('{0:.3f}'.format(self.Site.Location.declination))
def set_coord(self, text):
self.Site.Location.coordinate_system = text
#==============================================================================
# Field notes tab
#==============================================================================
class FieldNotesTab(QtWidgets.QWidget):
"""
Tab to hold field notes
"""
def __init__(self, parent=None):
super(FieldNotesTab, self).__init__(parent)
self.FieldNotes = mt.FieldNotes()
self._chn_list = ['{0:d}'.format(ii) for ii in range(1, 7, 1)]
self._rating_list = ['{0:d}'.format(ii) for ii in range(11)]
self.setup_ui()
def setup_ui(self):
self.data_logger_label = QtWidgets.QLabel('Data Logger')
self.data_logger_label.setFont(label_font)
self.data_logger_id_label = QtWidgets.QLabel('ID')
self.data_logger_id_edit = QtWidgets.QLineEdit(self.FieldNotes.DataLogger.id)
self.data_logger_id_edit.editingFinished.connect(self.set_dl_id)
self.data_logger_man_label = QtWidgets.QLabel('Manufacturer')
self.data_logger_man_edit = QtWidgets.QLineEdit(self.FieldNotes.DataLogger.manufacturer)
self.data_logger_man_edit.editingFinished.connect(self.set_dl_man)
self.data_logger_type_label = QtWidgets.QLabel('Type')
self.data_logger_type_edit = QtWidgets.QLineEdit(self.FieldNotes.DataLogger.type)
self.data_logger_type_edit.editingFinished.connect(self.set_dl_type)
#--> Instrument information
self.ex_widget = Electrode_Widget(self.FieldNotes.Electrode_ex,
comp='EX')
self.ey_widget = Electrode_Widget(self.FieldNotes.Electrode_ey,
comp='EY')
self.hx_widget = Magnetometer_Widget(self.FieldNotes.Magnetometer_hx,
comp='HX')
self.hy_widget = Magnetometer_Widget(self.FieldNotes.Magnetometer_hy,
comp='HY')
self.hz_widget = Magnetometer_Widget(self.FieldNotes.Magnetometer_hz,
comp='HZ')
##--> data quality
self.dq_label = QtWidgets.QLabel('Data Quality')
self.dq_label.setFont(label_font)
self.dq_good_periods = QtWidgets.QLabel('Good Periods (min, max)')
self.dq_good_periods_min = QtWidgets.QLineEdit()
self.dq_good_periods_min.editingFinished.connect(self.set_dq_period_min)
self.dq_good_periods_max = QtWidgets.QLineEdit()
self.dq_good_periods_max.editingFinished.connect(self.set_dq_period_max)
self.dq_rating_label = QtWidgets.QLabel('Rating')
self.dq_rating_combo = QtWidgets.QComboBox()
self.dq_rating_combo.addItems(self._rating_list)
self.dq_rating_combo.currentIndexChanged.connect(self.set_dq_rating)
self.dq_warning_flag_label = QtWidgets.QLabel('Warning Flag')
self.dq_warning_flag_combo = QtWidgets.QComboBox()
self.dq_warning_flag_combo.addItems(['False', 'True'])
self.dq_warning_flag_combo.currentIndexChanged.connect(self.set_dq_flag)
self.dq_warning_comments_label = QtWidgets.QLabel('Warning Comments')
self.dq_warning_comments_edit = QtWidgets.QLineEdit()
self.dq_warning_comments_edit.editingFinished.connect(self.set_dq_warning_comments)
self.dq_comments = QtWidgets.QTextEdit()
        self.dq_comments.setText('Data Quality Comments')
self.dq_comments.textChanged.connect(self.set_dq_comments)
#--> layout
layout = QtWidgets.QFormLayout()
layout.addRow(self.data_logger_label, None)
layout.addRow(self.data_logger_id_label, self.data_logger_id_edit)
layout.addRow(self.data_logger_man_label, self.data_logger_man_edit)
layout.addRow(self.data_logger_type_label, self.data_logger_type_edit)
dq_layout = QtWidgets.QGridLayout()
dq_layout.addWidget(self.dq_label, 0, 0)
dq_layout.addWidget(self.dq_good_periods, 1, 0)
dq_layout.addWidget(self.dq_good_periods_min, 1, 1)
        dq_layout.addWidget(self.dq_good_periods_max, 1, 2)
        dq_layout.addWidget(self.dq_rating_label, 1, 3)
        dq_layout.addWidget(self.dq_rating_combo, 1, 4)
dq_layout.addWidget(self.dq_warning_flag_label, 2, 0)
dq_layout.addWidget(self.dq_warning_flag_combo, 2, 1)
dq_layout.addWidget(self.dq_warning_comments_label, 2, 2)
dq_layout.addWidget(self.dq_warning_comments_edit, 2, 3)
dq_layout.addWidget(self.dq_comments, 3, 0, 2, 4)
final_layout = QtWidgets.QVBoxLayout()
final_layout.addLayout(layout)
final_layout.addLayout(dq_layout)
final_layout.addWidget(self.ex_widget)
final_layout.addWidget(self.ey_widget)
final_layout.addWidget(self.hx_widget)
final_layout.addWidget(self.hy_widget)
final_layout.addWidget(self.hz_widget)
self.setLayout(final_layout)
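    # --> slots: push edited widget values back into the FieldNotes object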
def set_dl_id(self):
self.FieldNotes.DataLogger.id = self.data_logger_id_edit.text()
def set_dl_man(self):
self.FieldNotes.DataLogger.manufacturer = self.data_logger_man_edit.text()
def set_dl_type(self):
self.FieldNotes.DataLogger.type = self.data_logger_type_edit.text()
def set_dq_period_min(self):
self.FieldNotes.DataQuality.good_from_period = _check_float(self.dq_good_periods_min.text())
self.dq_good_periods_min.setText('{0:.5g}'.format(self.FieldNotes.DataQuality.good_from_period))
def set_dq_period_max(self):
self.FieldNotes.DataQuality.good_to_period = _check_float(self.dq_good_periods_max.text())
self.dq_good_periods_max.setText('{0:.5g}'.format(self.FieldNotes.DataQuality.good_to_period))
def set_dq_rating(self):
self.FieldNotes.DataQuality.rating = self.dq_rating_combo.currentIndex()
def set_dq_flag(self):
self.FieldNotes.DataQuality.warnings_flag = self.dq_warning_flag_combo.currentIndex()
def set_dq_warning_comments(self):
self.FieldNotes.DataQuality.warnings_comments = self.dq_warning_comments_edit.text()
def set_dq_comments(self):
self.FieldNotes.DataQuality.comments = self.dq_comments.toPlainText()
#==============================================================================
# Electrode
#==============================================================================
class Electrode_Widget(QtWidgets.QWidget):
"""
class to hold Magnetometer information
"""
def __init__(self, electrode_class, comp='EX', parent=None):
super(Electrode_Widget, self).__init__(parent)
self.Electrode = electrode_class
self.comp = comp
self._chn_list = ['{0:d}'.format(ii) for ii in range(1, 7, 1)]
self.setup_ui()
def setup_ui(self):
self.e_label = QtWidgets.QLabel('Electrode {0}'.format(self.comp))
self.e_label.setFont(label_font)
self.e_id_label = QtWidgets.QLabel('ID')
self.e_id_edit = QtWidgets.QLineEdit('{0}'.format(self.Electrode.id))
self.e_id_edit.editingFinished.connect(self.set_e_id)
self.e_man_label = QtWidgets.QLabel('Manufacturer')
self.e_man_edit = QtWidgets.QLineEdit(self.Electrode.manufacturer)
self.e_man_edit.editingFinished.connect(self.set_e_man)
self.e_type_label = QtWidgets.QLabel('Type')
self.e_type_edit = QtWidgets.QLineEdit(self.Electrode.type)
self.e_type_edit.editingFinished.connect(self.set_e_type)
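        # dipole geometry: (x, y) locates the first electrode and (x2, y2)
        # the second, in metres relative to the station location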
self.e_x_label = QtWidgets.QLabel("X (m)")
self.e_x_edit = QtWidgets.QLineEdit()
self.e_x_edit.editingFinished.connect(self.set_x)
self.e_y_label = QtWidgets.QLabel("Y (m)")
self.e_y_edit = QtWidgets.QLineEdit()
self.e_y_edit.editingFinished.connect(self.set_y)
self.e_x2_label = QtWidgets.QLabel("X2 (m)")
self.e_x2_edit = QtWidgets.QLineEdit()
self.e_x2_edit.editingFinished.connect(self.set_x2)
self.e_y2_label = QtWidgets.QLabel("Y2 (m)")
self.e_y2_edit = QtWidgets.QLineEdit()
self.e_y2_edit.editingFinished.connect(self.set_y2)
self.e_acqchn_label = QtWidgets.QLabel("Acq. Channel")
self.e_acqchn_combo = QtWidgets.QComboBox()
self.e_acqchn_combo.addItems(self._chn_list)
self.e_acqchn_combo.currentIndexChanged.connect(self.set_chn)
##--> set layout
e_layout = QtWidgets.QGridLayout()
e_layout.addWidget(self.e_label, 0, 0)
e_layout.addWidget(self.e_id_label, 1, 0)
e_layout.addWidget(self.e_id_edit, 1, 1)
e_layout.addWidget(self.e_man_label, 1, 2)
e_layout.addWidget(self.e_man_edit, 1, 3)
e_layout.addWidget(self.e_type_label, 1, 4)
e_layout.addWidget(self.e_type_edit, 1, 5)
e_layout.addWidget(self.e_x_label, 2, 0)
e_layout.addWidget(self.e_x_edit, 2, 1)
e_layout.addWidget(self.e_y_label, 2, 2)
e_layout.addWidget(self.e_y_edit, 2, 3)
e_layout.addWidget(self.e_x2_label, 2, 4)
e_layout.addWidget(self.e_x2_edit, 2, 5)
e_layout.addWidget(self.e_y2_label, 2, 6)
e_layout.addWidget(self.e_y2_edit, 2, 7)
e_layout.addWidget(self.e_acqchn_label, 1, 6)
e_layout.addWidget(self.e_acqchn_combo, 1, 7)
self.setLayout(e_layout)
def set_e_id(self):
self.Electrode.id = self.e_id_edit.text()
def set_e_man(self):
self.Electrode.manufacturer = self.e_man_edit.text()
def set_e_type(self):
self.Electrode.type = self.e_type_edit.text()
def set_x(self):
self.Electrode.x = _check_float(self.e_x_edit.text())
self.e_x_edit.setText('{0:.2f}'.format(self.Electrode.x))
def set_x2(self):
self.Electrode.x2 = _check_float(self.e_x2_edit.text())
self.e_x2_edit.setText('{0:.2f}'.format(self.Electrode.x2))
def set_y(self):
self.Electrode.y = _check_float(self.e_y_edit.text())
self.e_y_edit.setText('{0:.2f}'.format(self.Electrode.y))
def set_y2(self):
self.Electrode.y2 = _check_float(self.e_y2_edit.text())
self.e_y2_edit.setText('{0:.2f}'.format(self.Electrode.y2))
def set_chn(self):
        self.Electrode.acqchn = int(self.e_acqchn_combo.currentText())
#==============================================================================
# Magnetometer
#==============================================================================
class Magnetometer_Widget(QtWidgets.QWidget):
"""
class to hold magnetometer information
"""
def __init__(self, magnetometer_class, comp='HX', parent=None):
super(Magnetometer_Widget, self).__init__(parent)
self.Magnetometer = magnetometer_class
self.comp = comp
self._chn_list = ['{0:d}'.format(ii) for ii in range(1, 7, 1)]
self.setup_ui()
def setup_ui(self):
self.h_label = QtWidgets.QLabel('Magnetometer {0}'.format(self.comp))
self.h_label.setFont(label_font)
self.h_id_label = QtWidgets.QLabel('ID')
self.h_id_edit = QtWidgets.QLineEdit('{0}'.format(self.Magnetometer.id))
self.h_id_edit.editingFinished.connect(self.set_id)
self.h_man_label = QtWidgets.QLabel('Manufacturer')
self.h_man_edit = QtWidgets.QLineEdit(self.Magnetometer.manufacturer)
self.h_man_edit.editingFinished.connect(self.set_man)
self.h_type_label = QtWidgets.QLabel('Type')
self.h_type_edit = QtWidgets.QLineEdit(self.Magnetometer.type)
self.h_type_edit.editingFinished.connect(self.set_type)
self.h_x_label = QtWidgets.QLabel("X (m)")
self.h_x_edit = QtWidgets.QLineEdit()
self.h_x_edit.editingFinished.connect(self.set_x)
self.h_y_label = QtWidgets.QLabel("Y (m)")
self.h_y_edit = QtWidgets.QLineEdit()
self.h_y_edit.editingFinished.connect(self.set_y)
self.h_azm_label = QtWidgets.QLabel("Azimuth (deg)")
self.h_azm_edit = QtWidgets.QLineEdit()
self.h_azm_edit.editingFinished.connect(self.set_azm)
self.h_acqchn_label = QtWidgets.QLabel("Acq. Channel")
self.h_acqchn_combo = QtWidgets.QComboBox()
self.h_acqchn_combo.addItems(self._chn_list)
self.h_acqchn_combo.currentIndexChanged.connect(self.set_chn)
##--> set layout
h_layout = QtWidgets.QGridLayout()
h_layout.addWidget(self.h_label, 0, 0)
h_layout.addWidget(self.h_id_label, 1, 0)
h_layout.addWidget(self.h_id_edit, 1, 1)
h_layout.addWidget(self.h_man_label, 1, 2)
h_layout.addWidget(self.h_man_edit, 1, 3)
h_layout.addWidget(self.h_type_label, 1, 4)
h_layout.addWidget(self.h_type_edit, 1, 5)
h_layout.addWidget(self.h_x_label, 2, 0)
h_layout.addWidget(self.h_x_edit, 2, 1)
h_layout.addWidget(self.h_y_label, 2, 2)
h_layout.addWidget(self.h_y_edit, 2, 3)
h_layout.addWidget(self.h_azm_label, 2, 4)
h_layout.addWidget(self.h_azm_edit, 2, 5)
h_layout.addWidget(self.h_acqchn_label, 1, 6)
h_layout.addWidget(self.h_acqchn_combo, 1, 7)
self.setLayout(h_layout)
def set_id(self):
self.Magnetometer.id = self.h_id_edit.text()
def set_man(self):
self.Magnetometer.manufacturer = self.h_man_edit.text()
def set_type(self):
self.Magnetometer.type = self.h_type_edit.text()
def set_x(self):
self.Magnetometer.x = _check_float(self.h_x_edit.text())
self.h_x_edit.setText('{0:.2f}'.format(self.Magnetometer.x))
def set_y(self):
self.Magnetometer.y = _check_float(self.h_y_edit.text())
self.h_y_edit.setText('{0:.2f}'.format(self.Magnetometer.y))
def set_azm(self):
self.Magnetometer.azm = _check_float(self.h_azm_edit.text())
self.h_azm_edit.setText('{0:.2f}'.format(self.Magnetometer.azm))
def set_chn(self):
        self.Magnetometer.acqchn = int(self.h_acqchn_combo.currentText())
#==============================================================================
# Processing
#==============================================================================
class ProcessingTab(QtWidgets.QWidget):
"""
processing tab
"""
def __init__(self, parent=None):
super(ProcessingTab, self).__init__(parent)
self.Processing = mt.Processing()
self.setup_ui()
def setup_ui(self):
self.software_label = QtWidgets.QLabel('Software')
self.software_label.setFont(label_font)
self.software_name_label = QtWidgets.QLabel('Name')
self.software_name_edit = QtWidgets.QLineEdit()
self.software_name_edit.editingFinished.connect(self.set_software_name)
self.software_version_label = QtWidgets.QLabel('Version')
self.software_version_edit = QtWidgets.QLineEdit()
self.software_version_edit.editingFinished.connect(self.set_software_version)
self.software_author_label = QtWidgets.QLabel('Author')
self.software_author_edit = QtWidgets.QLineEdit()
self.software_author_edit.editingFinished.connect(self.set_software_author)
self.software_author_email_label = QtWidgets.QLabel('Author Email')
self.software_author_email_edit = QtWidgets.QLineEdit()
self.software_author_email_edit.editingFinished.connect(self.set_software_author_email)
self.software_author_org_label = QtWidgets.QLabel('Author Organization')
self.software_author_org_edit = QtWidgets.QLineEdit()
self.software_author_org_edit.editingFinished.connect(self.set_software_author_org)
self.software_author_url_label = QtWidgets.QLabel('URL')
self.software_author_url_edit = QtWidgets.QLineEdit()
self.software_author_url_edit.editingFinished.connect(self.set_software_author_url)
        self.software_date_label = QtWidgets.QLabel('Date (YYYY-MM-DD)')
self.software_date_edit = QtWidgets.QLineEdit()
self.software_date_edit.editingFinished.connect(self.set_software_date)
self.notes_label = QtWidgets.QLabel('Notes:')
self.notes_label.setFont(label_font)
self.notes_edit = QtWidgets.QTextEdit()
self.notes_edit.textChanged.connect(self.set_notes)
# self.parameters = []
# self.add_parameter_button = QtWidgets.QPushButton('Add Parameter')
# self.add_parameter_button.pressed.connect(self.add_parameter)
h_line_00 = QtWidgets.QFrame(self)
h_line_00.setFrameShape(QtWidgets.QFrame.HLine)
h_line_00.setFrameShadow(QtWidgets.QFrame.Sunken)
# layout
grid_layout = QtWidgets.QGridLayout()
grid_layout.addWidget(self.software_label, 0, 0)
grid_layout.addWidget(self.software_name_label, 1, 0)
grid_layout.addWidget(self.software_name_edit, 1, 1)
grid_layout.addWidget(self.software_version_label, 1, 2)
grid_layout.addWidget(self.software_version_edit, 1, 3)
grid_layout.addWidget(self.software_date_label, 1, 4)
grid_layout.addWidget(self.software_date_edit, 1, 5)
grid_layout.addWidget(self.software_author_label, 2, 0)
grid_layout.addWidget(self.software_author_edit, 2, 1)
grid_layout.addWidget(self.software_author_email_label, 2, 2)
grid_layout.addWidget(self.software_author_email_edit, 2, 3)
grid_layout.addWidget(self.software_author_org_label, 2, 4)
grid_layout.addWidget(self.software_author_org_edit, 2, 5)
grid_layout.addWidget(self.software_author_url_label, 3, 0)
grid_layout.addWidget(self.software_author_url_edit, 3, 1, 1, 5)
notes_layout = QtWidgets.QVBoxLayout()
notes_layout.addWidget(self.notes_label)
notes_layout.addWidget(self.notes_edit)
final_layout = QtWidgets.QVBoxLayout()
final_layout.addLayout(grid_layout)
final_layout.addWidget(h_line_00)
final_layout.addLayout(notes_layout)
self.setLayout(final_layout)
def set_software_name(self):
pass
def set_software_version(self):
pass
def set_software_author(self):
pass
def set_software_date(self):
pass
def set_software_author_email(self):
pass
def set_software_author_org(self):
pass
def set_software_author_url(self):
pass
def set_notes(self):
pass
class ProcessingParameter(QtWidgets.QWidget):
"""
processing name and value
"""
def __init__(self, parent=None):
super(ProcessingParameter, self).__init__(parent)
self.name = None
self.value = None
self.setup_ui()
def setup_ui(self):
self.value_edit = QtWidgets.QLineEdit()
self.value_edit.editingFinished.connect(self.set_value)
self.name_edit = QtWidgets.QLineEdit()
self.name_edit.editingFinished.connect(self.set_name)
        # layout
        layout = QtWidgets.QFormLayout()
        layout.addRow(self.name_edit, self.value_edit)
        self.setLayout(layout)
def set_name(self):
self.name = self.name_edit.text()
def set_value(self):
self.value = self.value_edit.text()
#==============================================================================
# Provenance
#==============================================================================
class ProvenanceTab(QtWidgets.QWidget):
"""
Provenance
"""
def __init__(self, parent=None):
super(ProvenanceTab, self).__init__(parent)
self.Provenance = mt.Provenance()
self.setup_ui()
def setup_ui(self):
self.creating_app_label = QtWidgets.QLabel('Creating Application')
self.creating_app_edit = QtWidgets.QLineEdit()
self.creating_app_edit.editingFinished.connect(self.set_creating_app)
self.creation_time_label = QtWidgets.QLabel('Creation Date')
self.creation_time_edit = QtWidgets.QDateEdit()
self.creation_time_edit.setCalendarPopup(True)
self.creation_time_edit.setDisplayFormat('yyyy-MM-dd')
self.creation_time_edit.dateChanged.connect(self.set_creation_time)
self.creator_label = QtWidgets.QLabel('Creator')
self.creator_label.setFont(label_font)
self.creator_name_label = QtWidgets.QLabel('Name')
self.creator_name_edit = QtWidgets.QLineEdit()
self.creator_name_edit.editingFinished.connect(self.set_creator_name)
self.creator_email_label = QtWidgets.QLabel('email')
self.creator_email_edit = QtWidgets.QLineEdit()
self.creator_email_edit.editingFinished.connect(self.set_creator_email)
self.creator_org_label = QtWidgets.QLabel('Organization')
self.creator_org_edit = QtWidgets.QLineEdit()
self.creator_org_edit.editingFinished.connect(self.set_creator_org)
self.creator_url_label = QtWidgets.QLabel('Organization URL')
self.creator_url_edit = QtWidgets.QLineEdit()
self.creator_url_edit.editingFinished.connect(self.set_creator_url)
self.submitter_label = QtWidgets.QLabel('Submitter')
self.submitter_label.setFont(label_font)
self.submitter_name_label = QtWidgets.QLabel('Name')
self.submitter_name_edit = QtWidgets.QLineEdit()
self.submitter_name_edit.editingFinished.connect(self.set_submitter_name)
self.submitter_email_label = QtWidgets.QLabel('email')
self.submitter_email_edit = QtWidgets.QLineEdit()
self.submitter_email_edit.editingFinished.connect(self.set_submitter_email)
self.submitter_org_label = QtWidgets.QLabel('Organization')
self.submitter_org_edit = QtWidgets.QLineEdit()
self.submitter_org_edit.editingFinished.connect(self.set_submitter_org)
self.submitter_url_label = QtWidgets.QLabel('Organization URL')
self.submitter_url_edit = QtWidgets.QLineEdit()
self.submitter_url_edit.editingFinished.connect(self.set_submitter_url)
##--> Layout
creation_layout = QtWidgets.QFormLayout()
creation_layout.addRow(self.creating_app_label,
self.creating_app_edit)
creation_layout.addRow(self.creation_time_label,
self.creation_time_edit)
creation_layout.setAlignment(QtCore.Qt.AlignTop)
creator_layout = QtWidgets.QGridLayout()
creator_layout.addWidget(self.creator_label, 0, 0)
creator_layout.addWidget(self.creator_name_label, 1, 0)
creator_layout.addWidget(self.creator_name_edit, 1, 1)
creator_layout.addWidget(self.creator_email_label, 1, 2)
creator_layout.addWidget(self.creator_email_edit, 1, 3)
creator_layout.addWidget(self.creator_org_label, 1, 4)
creator_layout.addWidget(self.creator_org_edit, 1, 5)
creator_layout.addWidget(self.creator_url_label, 2, 0)
creator_layout.addWidget(self.creator_url_edit, 2, 1, 1, 5)
creator_layout.setAlignment(QtCore.Qt.AlignTop)
submitter_layout = QtWidgets.QGridLayout()
submitter_layout.addWidget(self.submitter_label, 0, 0)
submitter_layout.addWidget(self.submitter_name_label, 1, 0)
submitter_layout.addWidget(self.submitter_name_edit, 1, 1)
submitter_layout.addWidget(self.submitter_email_label, 1, 2)
submitter_layout.addWidget(self.submitter_email_edit, 1, 3)
submitter_layout.addWidget(self.submitter_org_label, 1, 4)
submitter_layout.addWidget(self.submitter_org_edit, 1, 5)
submitter_layout.addWidget(self.submitter_url_label, 2, 0)
submitter_layout.addWidget(self.submitter_url_edit, 2, 1, 1, 5)
submitter_layout.setAlignment(QtCore.Qt.AlignTop)
final_layout = QtWidgets.QVBoxLayout()
final_layout.addLayout(creation_layout)
final_layout.addLayout(creator_layout)
final_layout.addLayout(submitter_layout)
final_layout.addStretch(0)
final_layout.setAlignment(QtCore.Qt.AlignTop)
self.setLayout(final_layout)
def set_creating_app(self):
pass
def set_creation_time(self):
date = self.creation_time_edit.date()
print(date.toPyDate())
def set_creator_name(self):
pass
def set_creator_email(self):
pass
def set_creator_org(self):
pass
def set_creator_url(self):
pass
def set_submitter_name(self):
pass
def set_submitter_email(self):
pass
def set_submitter_org(self):
pass
def set_submitter_url(self):
pass
#==============================================================================
# Copyright
#==============================================================================
class CopyrightTab(QtWidgets.QWidget):
"""
copyright
"""
def __init__(self, parent=None):
super(CopyrightTab, self).__init__(parent)
self.Copyright = mt.Copyright()
self._release_list = ['Unrestricted Release',
'Academic Use Only',
'Restrictions Apply']
self.setup_ui()
def setup_ui(self):
self.citation_label = QtWidgets.QLabel('Citation')
self.citation_label.setFont(label_font)
self.citation_author_label = QtWidgets.QLabel('Author')
self.citation_author_edit = QtWidgets.QLineEdit()
self.citation_author_edit.editingFinished.connect(self.set_author)
self.citation_title_label = QtWidgets.QLabel('Title')
self.citation_title_edit = QtWidgets.QLineEdit()
self.citation_title_edit.editingFinished.connect(self.set_title)
self.citation_journal_label = QtWidgets.QLabel('Journal')
self.citation_journal_edit = QtWidgets.QLineEdit()
self.citation_journal_edit.editingFinished.connect(self.set_journal)
self.citation_volume_label = QtWidgets.QLabel('Volume')
self.citation_volume_edit = QtWidgets.QLineEdit()
self.citation_volume_edit.editingFinished.connect(self.set_volume)
self.citation_year_label = QtWidgets.QLabel('Year')
self.citation_year_edit = QtWidgets.QLineEdit()
self.citation_year_edit.editingFinished.connect(self.set_year)
self.citation_doi_label = QtWidgets.QLabel('DOI')
self.citation_doi_edit = QtWidgets.QLineEdit()
self.citation_doi_edit.editingFinished.connect(self.set_doi)
self.release_status_name = QtWidgets.QLabel('Release Status')
self.release_status_combo = QtWidgets.QComboBox()
self.release_status_combo.addItems(self._release_list)
self.release_status_combo.currentIndexChanged.connect(self.set_release_status)
self.conditions_of_use_label = QtWidgets.QLabel('Conditions of Use')
self.conditions_of_use_edit = QtWidgets.QTextEdit()
self.conditions_of_use_edit.setText(self.Copyright.conditions_of_use)
self.conditions_of_use_edit.textChanged.connect(self.set_conditions)
##--> layout
cite_layout = QtWidgets.QGridLayout()
cite_layout.addWidget(self.citation_label, 0, 0)
        cite_layout.addWidget(self.citation_author_label, 1, 0)
        cite_layout.addWidget(self.citation_author_edit, 1, 1, 1, 5)
        cite_layout.addWidget(self.citation_title_label, 2, 0)
        cite_layout.addWidget(self.citation_title_edit, 2, 1, 1, 5)
cite_layout.addWidget(self.citation_journal_label, 3, 0)
cite_layout.addWidget(self.citation_journal_edit, 3, 1)
cite_layout.addWidget(self.citation_volume_label, 3, 2)
cite_layout.addWidget(self.citation_volume_edit, 3, 3)
cite_layout.addWidget(self.citation_year_label, 3, 4)
cite_layout.addWidget(self.citation_year_edit, 3, 5)
        cite_layout.addWidget(self.citation_doi_label, 4, 0)
        cite_layout.addWidget(self.citation_doi_edit, 4, 1, 1, 5)
cite_layout.setAlignment(QtCore.Qt.AlignTop)
combo_layout = QtWidgets.QHBoxLayout()
combo_layout.addWidget(self.release_status_name)
combo_layout.addWidget(self.release_status_combo)
release_layout = QtWidgets.QVBoxLayout()
release_layout.addLayout(combo_layout)
release_layout.addWidget(self.conditions_of_use_label)
release_layout.addWidget(self.conditions_of_use_edit)
final_layout = QtWidgets.QVBoxLayout()
final_layout.addLayout(cite_layout)
final_layout.addLayout(release_layout)
self.setLayout(final_layout)
def set_author(self):
pass
def set_title(self):
pass
def set_journal(self):
pass
def set_volume(self):
pass
def set_year(self):
pass
def set_doi(self):
pass
def set_release_status(self):
pass
def set_conditions(self):
pass
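        # Illustrative sketch only (not the original author's implementation):
        # based on the attribute read in setup_ui, this slot could write the
        # edited text back with
        #     self.Copyright.conditions_of_use = \
        #         self.conditions_of_use_edit.toPlainText()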
#==============================================================================
# Data
#==============================================================================
class DataTab(QtWidgets.QWidget):
"""
hold the data in tabular form
"""
def __init__(self, parent=None):
super(DataTab, self).__init__(parent)
self.Data = None
self._z_headers = ['Frequency (Hz)',
'Real Zxx', 'Imag Zxx', 'Err Zxx',
'Real Zxy', 'Imag Zxy', 'Err Zxy',
'Real Zyx', 'Imag Zyx', 'Err Zyx',
'Real Zyy', 'Imag Zyy', 'Err Zyy']
self._t_headers = ['Frequency (Hz)',
'Real Tzx', 'Imag Tzx', 'Err Tzx',
'Real Tzy', 'Imag Tzy', 'Err Tzy']
self.setup_ui()
def setup_ui(self):
self.tab = QtWidgets.QTabWidget()
self.data_z_table = QtWidgets.QTableWidget()
self.data_z_table.setColumnCount(13)
self.data_z_table.setRowCount(100)
self.data_z_table.setHorizontalHeaderLabels(self._z_headers)
self.tab.addTab(self.data_z_table, 'Impedance')
self.data_t_table = QtWidgets.QTableWidget()
self.data_t_table.setColumnCount(7)
self.data_t_table.setRowCount(100)
self.data_t_table.setHorizontalHeaderLabels(self._t_headers)
self.tab.addTab(self.data_t_table, 'Tipper')
layout = QtWidgets.QVBoxLayout()
layout.addWidget(self.tab)
self.setLayout(layout)
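    # The helper below is an illustrative sketch, not part of the original
    # module: it shows one way a table created above could be filled once
    # self.Data has been parsed into numeric rows.
    def _fill_table_sketch(self, table, rows):
        """Write a sequence of equal-length numeric rows into *table*."""
        table.setRowCount(len(rows))
        for row_index, row in enumerate(rows):
            for col_index, value in enumerate(row):
                # display the number, keep the cell selectable but read-only
                item = QtWidgets.QTableWidgetItem('{0:.6g}'.format(value))
                item.setFlags(QtCore.Qt.ItemIsEnabled |
                              QtCore.Qt.ItemIsSelectable)
                table.setItem(row_index, col_index, item)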
#==============================================================================
# Common functions
#==============================================================================
def _check_float(value):
try:
return_num = float(value)
except ValueError:
return_num = 0.0
return return_num
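# Illustrative behaviour of _check_float: '7.5' returns 7.5, while '' or 'n/a'
# fall back to 0.0, so GUI text fields never raise on malformed input.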
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
ex = MTMainWindow()
ex.show()
sys.exit(app.exec_()) | gpl-3.0 | -8,543,354,390,399,336,000 | 38.742397 | 104 | 0.594472 | false |
inveniosoftware/invenio-communities | docs/conf.py | 1 | 10051 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2021 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
from __future__ import print_function
import os
import sphinx.environment
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Do not warn on external images.
suppress_warnings = ['image.nonlocal_uri']
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Invenio-Communities'
copyright = u'2015, 2016, CERN'
author = u'CERN'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# Get the version string. Cannot be done with import!
g = {}
with open(os.path.join('..', 'invenio_communities', 'version.py'), 'rt') as fp:
exec(fp.read(), g)
version = g['__version__']
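# (exec() is used here so that building the docs does not require importing
# the package itself, and therefore does not need its runtime dependencies.)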
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
html_theme = 'alabaster'
html_theme_options = {
'description': 'Invenio module that adds support for communities.',
'github_user': 'inveniosoftware',
'github_repo': 'invenio-communities',
'github_button': False,
'github_banner': True,
'show_powered_by': False,
'extra_nav_links': {
'invenio-communities@GitHub': 'https://github.com/inveniosoftware/invenio-communities',
'invenio-communities@PyPI': 'https://pypi.python.org/pypi/invenio-communities/',
}
}
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html',
'searchbox.html',
'donate.html',
]
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'invenio-communities_namedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'invenio-communities.tex', u'invenio-communities Documentation',
u'CERN', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'invenio-communities', u'invenio-communities Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'invenio-communities', u'Invenio-Communities Documentation',
author, 'invenio-communities', 'Invenio module that adds support for communities.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
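# Further inventories could be added in the same way, for example
# (illustrative only):
# intersphinx_mapping['https://flask.palletsprojects.com/'] = None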
# Autodoc configuration.
autoclass_content = 'both'
| mit | 3,272,960,207,693,876,700 | 30.606918 | 95 | 0.698935 | false |