# === joomel1/phantomjs :: src/qt/qtwebkit/Tools/QueueStatusServer/config/logging.py (license: bsd-3-clause) ===
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Specified in seconds
queue_log_duration = 60 * 60
# === jablonskim/jupyweave :: jupyweave/output_manager.py (license: mit) ===
from os import makedirs
from os.path import join, dirname
import uuid
class OutputManager:
"""Responsible for managing """
def __init__(self, output_settings, input_filename):
self.__data_dir = output_settings.data_directory(input_filename)
self.__data_dir_url = output_settings.data_dir_url(input_filename)
self.__output_filename = output_settings.output_filename(input_filename)
def save_data(self, data, extension, filename=None):
"""Saves data to file, using output settings for path building"""
makedirs(self.__data_dir, exist_ok=True)
if filename is None:
filename = str.format('img_{0}{1}', str(uuid.uuid4()), extension)
else:
filename = str.format('{0}{1}', filename, extension)
file_path = join(self.__data_dir, filename)
file_url = join(self.__data_dir_url, filename)
with open(file_path, 'wb') as f:
f.write(data)
return file_url
def save_document(self, data):
"""Saves document to file"""
makedirs(dirname(self.__output_filename), exist_ok=True)
with open(self.__output_filename, 'w', encoding='utf8') as f:
f.write(data)
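
# Usage sketch (added for illustration; the `FixedSettings` stub below is
# hypothetical -- the real settings object comes from jupyweave's
# configuration layer and only needs these three methods):
#
#   class FixedSettings:
#       def data_directory(self, input_filename): return 'out/data'
#       def data_dir_url(self, input_filename): return 'data'
#       def output_filename(self, input_filename): return 'out/report.html'
#
#   om = OutputManager(FixedSettings(), 'report.ipynb')
#   url = om.save_data(b'...image bytes...', '.png')   # -> 'data/img_<uuid>.png'
#   om.save_document('<html>...</html>')               # writes out/report.html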
# === jeffery-do/Vizdoombot :: doom/lib/python3.5/site-packages/numpy/polynomial/hermite_e.py (license: mit) ===
"""
Objects for dealing with Hermite_e series.
This module provides a number of objects (mostly functions) useful for
dealing with Hermite_e series, including a `HermiteE` class that
encapsulates the usual arithmetic operations. (General information
on how this module represents and works with such polynomials is in the
docstring for its "parent" sub-package, `numpy.polynomial`).
Constants
---------
- `hermedomain` -- Hermite_e series default domain, [-1,1].
- `hermezero` -- Hermite_e series that evaluates identically to 0.
- `hermeone` -- Hermite_e series that evaluates identically to 1.
- `hermex` -- Hermite_e series for the identity map, ``f(x) = x``.
Arithmetic
----------
- `hermemulx` -- multiply a Hermite_e series in ``P_i(x)`` by ``x``.
- `hermeadd` -- add two Hermite_e series.
- `hermesub` -- subtract one Hermite_e series from another.
- `hermemul` -- multiply two Hermite_e series.
- `hermediv` -- divide one Hermite_e series by another.
- `hermeval` -- evaluate a Hermite_e series at given points.
- `hermeval2d` -- evaluate a 2D Hermite_e series at given points.
- `hermeval3d` -- evaluate a 3D Hermite_e series at given points.
- `hermegrid2d` -- evaluate a 2D Hermite_e series on a Cartesian product.
- `hermegrid3d` -- evaluate a 3D Hermite_e series on a Cartesian product.
Calculus
--------
- `hermeder` -- differentiate a Hermite_e series.
- `hermeint` -- integrate a Hermite_e series.
Misc Functions
--------------
- `hermefromroots` -- create a Hermite_e series with specified roots.
- `hermeroots` -- find the roots of a Hermite_e series.
- `hermevander` -- Vandermonde-like matrix for Hermite_e polynomials.
- `hermevander2d` -- Vandermonde-like matrix for 2D power series.
- `hermevander3d` -- Vandermonde-like matrix for 3D power series.
- `hermegauss` -- Gauss-Hermite_e quadrature, points and weights.
- `hermeweight` -- Hermite_e weight function.
- `hermecompanion` -- symmetrized companion matrix in Hermite_e form.
- `hermefit` -- least-squares fit returning a Hermite_e series.
- `hermetrim` -- trim leading coefficients from a Hermite_e series.
- `hermeline` -- Hermite_e series of given straight line.
- `herme2poly` -- convert a Hermite_e series to a polynomial.
- `poly2herme` -- convert a polynomial to a Hermite_e series.
Classes
-------
- `HermiteE` -- A Hermite_e series class.
See also
--------
`numpy.polynomial`
"""
from __future__ import division, absolute_import, print_function
import warnings
import numpy as np
import numpy.linalg as la
from . import polyutils as pu
from ._polybase import ABCPolyBase
__all__ = [
'hermezero', 'hermeone', 'hermex', 'hermedomain', 'hermeline',
'hermeadd', 'hermesub', 'hermemulx', 'hermemul', 'hermediv',
'hermepow', 'hermeval', 'hermeder', 'hermeint', 'herme2poly',
'poly2herme', 'hermefromroots', 'hermevander', 'hermefit', 'hermetrim',
'hermeroots', 'HermiteE', 'hermeval2d', 'hermeval3d', 'hermegrid2d',
'hermegrid3d', 'hermevander2d', 'hermevander3d', 'hermecompanion',
'hermegauss', 'hermeweight']
hermetrim = pu.trimcoef
def poly2herme(pol):
"""
poly2herme(pol)
Convert a polynomial to a Hermite series.
Convert an array representing the coefficients of a polynomial (relative
to the "standard" basis) ordered from lowest degree to highest, to an
array of the coefficients of the equivalent Hermite series, ordered
from lowest to highest degree.
Parameters
----------
pol : array_like
1-D array containing the polynomial coefficients
Returns
-------
c : ndarray
1-D array containing the coefficients of the equivalent Hermite
series.
See Also
--------
herme2poly
Notes
-----
The easy way to do conversions between polynomial basis sets
is to use the convert method of a class instance.
Examples
--------
>>> from numpy.polynomial.hermite_e import poly2herme
>>> poly2herme(np.arange(4))
array([ 2., 10., 2., 3.])
"""
[pol] = pu.as_series([pol])
deg = len(pol) - 1
res = 0
for i in range(deg, -1, -1):
res = hermeadd(hermemulx(res), pol[i])
return res
def herme2poly(c):
"""
Convert a Hermite series to a polynomial.
Convert an array representing the coefficients of a Hermite series,
ordered from lowest degree to highest, to an array of the coefficients
of the equivalent polynomial (relative to the "standard" basis) ordered
from lowest to highest degree.
Parameters
----------
c : array_like
1-D array containing the Hermite series coefficients, ordered
from lowest order term to highest.
Returns
-------
pol : ndarray
1-D array containing the coefficients of the equivalent polynomial
(relative to the "standard" basis) ordered from lowest order term
to highest.
See Also
--------
poly2herme
Notes
-----
The easy way to do conversions between polynomial basis sets
is to use the convert method of a class instance.
Examples
--------
>>> from numpy.polynomial.hermite_e import herme2poly
>>> herme2poly([ 2., 10., 2., 3.])
array([ 0., 1., 2., 3.])
"""
from .polynomial import polyadd, polysub, polymulx
[c] = pu.as_series([c])
n = len(c)
if n == 1:
return c
if n == 2:
return c
else:
c0 = c[-2]
c1 = c[-1]
# i is the current degree of c1
for i in range(n - 1, 1, -1):
tmp = c0
c0 = polysub(c[i - 2], c1*(i - 1))
c1 = polyadd(tmp, polymulx(c1))
return polyadd(c0, polymulx(c1))
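# Round-trip sketch (illustrative, not part of the original module):
# poly2herme and herme2poly are inverses, so converting a coefficient
# vector to the HermiteE basis and back recovers it:
#
#   c = poly2herme([0., 1., 2., 3.])               # -> [2., 10., 2., 3.]
#   np.allclose(herme2poly(c), [0., 1., 2., 3.])   # True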
#
# These constant arrays are of integer type so as to be compatible
# with the widest range of other types, such as Decimal.
#
# Hermite
hermedomain = np.array([-1, 1])
# Hermite coefficients representing zero.
hermezero = np.array([0])
# Hermite coefficients representing one.
hermeone = np.array([1])
# Hermite coefficients representing the identity x.
hermex = np.array([0, 1])
def hermeline(off, scl):
"""
Hermite series whose graph is a straight line.
Parameters
----------
off, scl : scalars
The specified line is given by ``off + scl*x``.
Returns
-------
y : ndarray
This module's representation of the Hermite series for
``off + scl*x``.
See Also
--------
polyline, chebline
Examples
--------
    >>> from numpy.polynomial.hermite_e import hermeline, hermeval
>>> hermeval(0,hermeline(3, 2))
3.0
>>> hermeval(1,hermeline(3, 2))
5.0
"""
if scl != 0:
return np.array([off, scl])
else:
return np.array([off])
def hermefromroots(roots):
"""
Generate a HermiteE series with given roots.
The function returns the coefficients of the polynomial
.. math:: p(x) = (x - r_0) * (x - r_1) * ... * (x - r_n),
in HermiteE form, where the `r_n` are the roots specified in `roots`.
If a zero has multiplicity n, then it must appear in `roots` n times.
For instance, if 2 is a root of multiplicity three and 3 is a root of
multiplicity 2, then `roots` looks something like [2, 2, 2, 3, 3]. The
roots can appear in any order.
If the returned coefficients are `c`, then
.. math:: p(x) = c_0 + c_1 * He_1(x) + ... + c_n * He_n(x)
The coefficient of the last term is not generally 1 for monic
polynomials in HermiteE form.
Parameters
----------
roots : array_like
Sequence containing the roots.
Returns
-------
out : ndarray
1-D array of coefficients. If all roots are real then `out` is a
real array, if some of the roots are complex, then `out` is complex
even if all the coefficients in the result are real (see Examples
below).
See Also
--------
polyfromroots, legfromroots, lagfromroots, hermfromroots,
chebfromroots.
Examples
--------
>>> from numpy.polynomial.hermite_e import hermefromroots, hermeval
>>> coef = hermefromroots((-1, 0, 1))
>>> hermeval((-1, 0, 1), coef)
array([ 0., 0., 0.])
>>> coef = hermefromroots((-1j, 1j))
>>> hermeval((-1j, 1j), coef)
array([ 0.+0.j, 0.+0.j])
"""
if len(roots) == 0:
return np.ones(1)
else:
[roots] = pu.as_series([roots], trim=False)
roots.sort()
p = [hermeline(-r, 1) for r in roots]
n = len(p)
while n > 1:
m, r = divmod(n, 2)
tmp = [hermemul(p[i], p[i+m]) for i in range(m)]
if r:
tmp[0] = hermemul(tmp[0], p[-1])
p = tmp
n = m
return p[0]
def hermeadd(c1, c2):
"""
Add one Hermite series to another.
Returns the sum of two Hermite series `c1` + `c2`. The arguments
are sequences of coefficients ordered from lowest order term to
highest, i.e., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of Hermite series coefficients ordered from low to
high.
Returns
-------
out : ndarray
Array representing the Hermite series of their sum.
See Also
--------
hermesub, hermemul, hermediv, hermepow
Notes
-----
Unlike multiplication, division, etc., the sum of two Hermite series
is a Hermite series (without having to "reproject" the result onto
the basis set) so addition, just like that of "standard" polynomials,
is simply "component-wise."
Examples
--------
>>> from numpy.polynomial.hermite_e import hermeadd
>>> hermeadd([1, 2, 3], [1, 2, 3, 4])
array([ 2., 4., 6., 4.])
"""
# c1, c2 are trimmed copies
[c1, c2] = pu.as_series([c1, c2])
if len(c1) > len(c2):
c1[:c2.size] += c2
ret = c1
else:
c2[:c1.size] += c1
ret = c2
return pu.trimseq(ret)
def hermesub(c1, c2):
"""
Subtract one Hermite series from another.
Returns the difference of two Hermite series `c1` - `c2`. The
sequences of coefficients are from lowest order term to highest, i.e.,
[1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of Hermite series coefficients ordered from low to
high.
Returns
-------
out : ndarray
Of Hermite series coefficients representing their difference.
See Also
--------
hermeadd, hermemul, hermediv, hermepow
Notes
-----
Unlike multiplication, division, etc., the difference of two Hermite
series is a Hermite series (without having to "reproject" the result
onto the basis set) so subtraction, just like that of "standard"
polynomials, is simply "component-wise."
Examples
--------
>>> from numpy.polynomial.hermite_e import hermesub
>>> hermesub([1, 2, 3, 4], [1, 2, 3])
array([ 0., 0., 0., 4.])
"""
# c1, c2 are trimmed copies
[c1, c2] = pu.as_series([c1, c2])
if len(c1) > len(c2):
c1[:c2.size] -= c2
ret = c1
else:
c2 = -c2
c2[:c1.size] += c1
ret = c2
return pu.trimseq(ret)
def hermemulx(c):
"""Multiply a Hermite series by x.
Multiply the Hermite series `c` by x, where x is the independent
variable.
Parameters
----------
c : array_like
1-D array of Hermite series coefficients ordered from low to
high.
Returns
-------
out : ndarray
Array representing the result of the multiplication.
Notes
-----
The multiplication uses the recursion relationship for Hermite
polynomials in the form
.. math::
        xP_i(x) = P_{i + 1}(x) + iP_{i - 1}(x)
Examples
--------
>>> from numpy.polynomial.hermite_e import hermemulx
>>> hermemulx([1, 2, 3])
array([ 2., 7., 2., 3.])
"""
# c is a trimmed copy
[c] = pu.as_series([c])
# The zero series needs special treatment
if len(c) == 1 and c[0] == 0:
return c
prd = np.empty(len(c) + 1, dtype=c.dtype)
prd[0] = c[0]*0
prd[1] = c[0]
for i in range(1, len(c)):
prd[i + 1] = c[i]
prd[i - 1] += c[i]*i
return prd
def hermemul(c1, c2):
"""
Multiply one Hermite series by another.
Returns the product of two Hermite series `c1` * `c2`. The arguments
are sequences of coefficients, from lowest order "term" to highest,
e.g., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of Hermite series coefficients ordered from low to
high.
Returns
-------
out : ndarray
Of Hermite series coefficients representing their product.
See Also
--------
hermeadd, hermesub, hermediv, hermepow
Notes
-----
In general, the (polynomial) product of two C-series results in terms
that are not in the Hermite polynomial basis set. Thus, to express
the product as a Hermite series, it is necessary to "reproject" the
product onto said basis set, which may produce "unintuitive" (but
correct) results; see Examples section below.
Examples
--------
>>> from numpy.polynomial.hermite_e import hermemul
>>> hermemul([1, 2, 3], [0, 1, 2])
array([ 14., 15., 28., 7., 6.])
"""
# s1, s2 are trimmed copies
[c1, c2] = pu.as_series([c1, c2])
if len(c1) > len(c2):
c = c2
xs = c1
else:
c = c1
xs = c2
if len(c) == 1:
c0 = c[0]*xs
c1 = 0
elif len(c) == 2:
c0 = c[0]*xs
c1 = c[1]*xs
else:
nd = len(c)
c0 = c[-2]*xs
c1 = c[-1]*xs
for i in range(3, len(c) + 1):
tmp = c0
nd = nd - 1
c0 = hermesub(c[-i]*xs, c1*(nd - 1))
c1 = hermeadd(tmp, hermemulx(c1))
return hermeadd(c0, hermemulx(c1))
def hermediv(c1, c2):
"""
Divide one Hermite series by another.
Returns the quotient-with-remainder of two Hermite series
`c1` / `c2`. The arguments are sequences of coefficients from lowest
order "term" to highest, e.g., [1,2,3] represents the series
``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of Hermite series coefficients ordered from low to
high.
Returns
-------
[quo, rem] : ndarrays
Of Hermite series coefficients representing the quotient and
remainder.
See Also
--------
hermeadd, hermesub, hermemul, hermepow
Notes
-----
In general, the (polynomial) division of one Hermite series by another
results in quotient and remainder terms that are not in the Hermite
polynomial basis set. Thus, to express these results as a Hermite
series, it is necessary to "reproject" the results onto the Hermite
basis set, which may produce "unintuitive" (but correct) results; see
Examples section below.
Examples
--------
>>> from numpy.polynomial.hermite_e import hermediv
>>> hermediv([ 14., 15., 28., 7., 6.], [0, 1, 2])
(array([ 1., 2., 3.]), array([ 0.]))
>>> hermediv([ 15., 17., 28., 7., 6.], [0, 1, 2])
(array([ 1., 2., 3.]), array([ 1., 2.]))
"""
# c1, c2 are trimmed copies
[c1, c2] = pu.as_series([c1, c2])
if c2[-1] == 0:
raise ZeroDivisionError()
lc1 = len(c1)
lc2 = len(c2)
if lc1 < lc2:
return c1[:1]*0, c1
elif lc2 == 1:
return c1/c2[-1], c1[:1]*0
else:
quo = np.empty(lc1 - lc2 + 1, dtype=c1.dtype)
rem = c1
for i in range(lc1 - lc2, - 1, -1):
p = hermemul([0]*i + [1], c2)
q = rem[-1]/p[-1]
rem = rem[:-1] - q*p[:-1]
quo[i] = q
return quo, pu.trimseq(rem)
def hermepow(c, pow, maxpower=16):
"""Raise a Hermite series to a power.
Returns the Hermite series `c` raised to the power `pow`. The
argument `c` is a sequence of coefficients ordered from low to high.
i.e., [1,2,3] is the series ``P_0 + 2*P_1 + 3*P_2.``
Parameters
----------
c : array_like
1-D array of Hermite series coefficients ordered from low to
high.
pow : integer
Power to which the series will be raised
maxpower : integer, optional
Maximum power allowed. This is mainly to limit growth of the series
to unmanageable size. Default is 16
Returns
-------
coef : ndarray
Hermite series of power.
See Also
--------
hermeadd, hermesub, hermemul, hermediv
Examples
--------
>>> from numpy.polynomial.hermite_e import hermepow
>>> hermepow([1, 2, 3], 2)
array([ 23., 28., 46., 12., 9.])
"""
# c is a trimmed copy
[c] = pu.as_series([c])
power = int(pow)
if power != pow or power < 0:
raise ValueError("Power must be a non-negative integer.")
elif maxpower is not None and power > maxpower:
raise ValueError("Power is too large")
elif power == 0:
return np.array([1], dtype=c.dtype)
elif power == 1:
return c
else:
# This can be made more efficient by using powers of two
# in the usual way.
prd = c
for i in range(2, power + 1):
prd = hermemul(prd, c)
return prd
def hermeder(c, m=1, scl=1, axis=0):
"""
Differentiate a Hermite_e series.
Returns the series coefficients `c` differentiated `m` times along
`axis`. At each iteration the result is multiplied by `scl` (the
scaling factor is for use in a linear change of variable). The argument
`c` is an array of coefficients from low to high degree along each
axis, e.g., [1,2,3] represents the series ``1*He_0 + 2*He_1 + 3*He_2``
while [[1,2],[1,2]] represents ``1*He_0(x)*He_0(y) + 1*He_1(x)*He_0(y)
+ 2*He_0(x)*He_1(y) + 2*He_1(x)*He_1(y)`` if axis=0 is ``x`` and axis=1
is ``y``.
Parameters
----------
c : array_like
Array of Hermite_e series coefficients. If `c` is multidimensional
the different axis correspond to different variables with the
degree in each axis given by the corresponding index.
m : int, optional
Number of derivatives taken, must be non-negative. (Default: 1)
scl : scalar, optional
Each differentiation is multiplied by `scl`. The end result is
multiplication by ``scl**m``. This is for use in a linear change of
variable. (Default: 1)
axis : int, optional
Axis over which the derivative is taken. (Default: 0).
.. versionadded:: 1.7.0
Returns
-------
der : ndarray
Hermite series of the derivative.
See Also
--------
hermeint
Notes
-----
In general, the result of differentiating a Hermite series does not
resemble the same operation on a power series. Thus the result of this
function may be "unintuitive," albeit correct; see Examples section
below.
Examples
--------
>>> from numpy.polynomial.hermite_e import hermeder
>>> hermeder([ 1., 1., 1., 1.])
array([ 1., 2., 3.])
>>> hermeder([-0.25, 1., 1./2., 1./3., 1./4 ], m=2)
array([ 1., 2., 3.])
"""
c = np.array(c, ndmin=1, copy=1)
if c.dtype.char in '?bBhHiIlLqQpP':
c = c.astype(np.double)
cnt, iaxis = [int(t) for t in [m, axis]]
if cnt != m:
raise ValueError("The order of derivation must be integer")
if cnt < 0:
raise ValueError("The order of derivation must be non-negative")
if iaxis != axis:
raise ValueError("The axis must be integer")
if not -c.ndim <= iaxis < c.ndim:
raise ValueError("The axis is out of range")
if iaxis < 0:
iaxis += c.ndim
if cnt == 0:
return c
c = np.rollaxis(c, iaxis)
n = len(c)
if cnt >= n:
return c[:1]*0
else:
for i in range(cnt):
n = n - 1
c *= scl
der = np.empty((n,) + c.shape[1:], dtype=c.dtype)
for j in range(n, 0, -1):
der[j - 1] = j*c[j]
c = der
c = np.rollaxis(c, 0, iaxis + 1)
return c
def hermeint(c, m=1, k=[], lbnd=0, scl=1, axis=0):
"""
Integrate a Hermite_e series.
Returns the Hermite_e series coefficients `c` integrated `m` times from
`lbnd` along `axis`. At each iteration the resulting series is
**multiplied** by `scl` and an integration constant, `k`, is added.
The scaling factor is for use in a linear change of variable. ("Buyer
beware": note that, depending on what one is doing, one may want `scl`
to be the reciprocal of what one might expect; for more information,
see the Notes section below.) The argument `c` is an array of
coefficients from low to high degree along each axis, e.g., [1,2,3]
represents the series ``H_0 + 2*H_1 + 3*H_2`` while [[1,2],[1,2]]
represents ``1*H_0(x)*H_0(y) + 1*H_1(x)*H_0(y) + 2*H_0(x)*H_1(y) +
2*H_1(x)*H_1(y)`` if axis=0 is ``x`` and axis=1 is ``y``.
Parameters
----------
c : array_like
Array of Hermite_e series coefficients. If c is multidimensional
the different axis correspond to different variables with the
degree in each axis given by the corresponding index.
m : int, optional
Order of integration, must be positive. (Default: 1)
k : {[], list, scalar}, optional
Integration constant(s). The value of the first integral at
``lbnd`` is the first value in the list, the value of the second
integral at ``lbnd`` is the second value, etc. If ``k == []`` (the
default), all constants are set to zero. If ``m == 1``, a single
scalar can be given instead of a list.
lbnd : scalar, optional
The lower bound of the integral. (Default: 0)
scl : scalar, optional
Following each integration the result is *multiplied* by `scl`
before the integration constant is added. (Default: 1)
axis : int, optional
Axis over which the integral is taken. (Default: 0).
.. versionadded:: 1.7.0
Returns
-------
S : ndarray
Hermite_e series coefficients of the integral.
Raises
------
ValueError
If ``m < 0``, ``len(k) > m``, ``np.isscalar(lbnd) == False``, or
``np.isscalar(scl) == False``.
See Also
--------
hermeder
Notes
-----
Note that the result of each integration is *multiplied* by `scl`.
Why is this important to note? Say one is making a linear change of
variable :math:`u = ax + b` in an integral relative to `x`. Then
    :math:`dx = du/a`, so one will need to set `scl` equal to
:math:`1/a` - perhaps not what one would have first thought.
Also note that, in general, the result of integrating a C-series needs
to be "reprojected" onto the C-series basis set. Thus, typically,
the result of this function is "unintuitive," albeit correct; see
Examples section below.
Examples
--------
>>> from numpy.polynomial.hermite_e import hermeint
>>> hermeint([1, 2, 3]) # integrate once, value 0 at 0.
array([ 1., 1., 1., 1.])
>>> hermeint([1, 2, 3], m=2) # integrate twice, value & deriv 0 at 0
array([-0.25 , 1. , 0.5 , 0.33333333, 0.25 ])
>>> hermeint([1, 2, 3], k=1) # integrate once, value 1 at 0.
array([ 2., 1., 1., 1.])
>>> hermeint([1, 2, 3], lbnd=-1) # integrate once, value 0 at -1
array([-1., 1., 1., 1.])
>>> hermeint([1, 2, 3], m=2, k=[1, 2], lbnd=-1)
array([ 1.83333333, 0. , 0.5 , 0.33333333, 0.25 ])
"""
c = np.array(c, ndmin=1, copy=1)
if c.dtype.char in '?bBhHiIlLqQpP':
c = c.astype(np.double)
if not np.iterable(k):
k = [k]
cnt, iaxis = [int(t) for t in [m, axis]]
if cnt != m:
raise ValueError("The order of integration must be integer")
if cnt < 0:
raise ValueError("The order of integration must be non-negative")
if len(k) > cnt:
raise ValueError("Too many integration constants")
if iaxis != axis:
raise ValueError("The axis must be integer")
if not -c.ndim <= iaxis < c.ndim:
raise ValueError("The axis is out of range")
if iaxis < 0:
iaxis += c.ndim
if cnt == 0:
return c
c = np.rollaxis(c, iaxis)
k = list(k) + [0]*(cnt - len(k))
for i in range(cnt):
n = len(c)
c *= scl
if n == 1 and np.all(c[0] == 0):
c[0] += k[i]
else:
tmp = np.empty((n + 1,) + c.shape[1:], dtype=c.dtype)
tmp[0] = c[0]*0
tmp[1] = c[0]
for j in range(1, n):
tmp[j + 1] = c[j]/(j + 1)
tmp[0] += k[i] - hermeval(lbnd, tmp)
c = tmp
c = np.rollaxis(c, 0, iaxis + 1)
return c
def hermeval(x, c, tensor=True):
"""
Evaluate an HermiteE series at points x.
If `c` is of length `n + 1`, this function returns the value:
.. math:: p(x) = c_0 * He_0(x) + c_1 * He_1(x) + ... + c_n * He_n(x)
The parameter `x` is converted to an array only if it is a tuple or a
list, otherwise it is treated as a scalar. In either case, either `x`
or its elements must support multiplication and addition both with
themselves and with the elements of `c`.
If `c` is a 1-D array, then `p(x)` will have the same shape as `x`. If
`c` is multidimensional, then the shape of the result depends on the
value of `tensor`. If `tensor` is true the shape will be c.shape[1:] +
x.shape. If `tensor` is false the shape will be c.shape[1:]. Note that
    scalars have shape ().
Trailing zeros in the coefficients will be used in the evaluation, so
they should be avoided if efficiency is a concern.
Parameters
----------
x : array_like, compatible object
If `x` is a list or tuple, it is converted to an ndarray, otherwise
it is left unchanged and treated as a scalar. In either case, `x`
or its elements must support addition and multiplication with
with themselves and with the elements of `c`.
c : array_like
Array of coefficients ordered so that the coefficients for terms of
degree n are contained in c[n]. If `c` is multidimensional the
remaining indices enumerate multiple polynomials. In the two
dimensional case the coefficients may be thought of as stored in
the columns of `c`.
tensor : boolean, optional
If True, the shape of the coefficient array is extended with ones
on the right, one for each dimension of `x`. Scalars have dimension 0
for this action. The result is that every column of coefficients in
`c` is evaluated for every element of `x`. If False, `x` is broadcast
over the columns of `c` for the evaluation. This keyword is useful
when `c` is multidimensional. The default value is True.
.. versionadded:: 1.7.0
Returns
-------
values : ndarray, algebra_like
The shape of the return value is described above.
See Also
--------
hermeval2d, hermegrid2d, hermeval3d, hermegrid3d
Notes
-----
The evaluation uses Clenshaw recursion, aka synthetic division.
Examples
--------
>>> from numpy.polynomial.hermite_e import hermeval
>>> coef = [1,2,3]
>>> hermeval(1, coef)
3.0
>>> hermeval([[1,2],[3,4]], coef)
array([[ 3., 14.],
[ 31., 54.]])
"""
c = np.array(c, ndmin=1, copy=0)
if c.dtype.char in '?bBhHiIlLqQpP':
c = c.astype(np.double)
if isinstance(x, (tuple, list)):
x = np.asarray(x)
if isinstance(x, np.ndarray) and tensor:
c = c.reshape(c.shape + (1,)*x.ndim)
if len(c) == 1:
c0 = c[0]
c1 = 0
elif len(c) == 2:
c0 = c[0]
c1 = c[1]
else:
nd = len(c)
c0 = c[-2]
c1 = c[-1]
for i in range(3, len(c) + 1):
tmp = c0
nd = nd - 1
c0 = c[-i] - c1*(nd - 1)
c1 = tmp + c1*x
return c0 + c1*x
def hermeval2d(x, y, c):
"""
Evaluate a 2-D HermiteE series at points (x, y).
This function returns the values:
.. math:: p(x,y) = \\sum_{i,j} c_{i,j} * He_i(x) * He_j(y)
The parameters `x` and `y` are converted to arrays only if they are
tuples or a lists, otherwise they are treated as a scalars and they
must have the same shape after conversion. In either case, either `x`
and `y` or their elements must support multiplication and addition both
with themselves and with the elements of `c`.
If `c` is a 1-D array a one is implicitly appended to its shape to make
it 2-D. The shape of the result will be c.shape[2:] + x.shape.
Parameters
----------
x, y : array_like, compatible objects
The two dimensional series is evaluated at the points `(x, y)`,
where `x` and `y` must have the same shape. If `x` or `y` is a list
or tuple, it is first converted to an ndarray, otherwise it is left
unchanged and if it isn't an ndarray it is treated as a scalar.
c : array_like
Array of coefficients ordered so that the coefficient of the term
of multi-degree i,j is contained in ``c[i,j]``. If `c` has
dimension greater than two the remaining indices enumerate multiple
sets of coefficients.
Returns
-------
values : ndarray, compatible object
The values of the two dimensional polynomial at points formed with
pairs of corresponding values from `x` and `y`.
See Also
--------
hermeval, hermegrid2d, hermeval3d, hermegrid3d
Notes
-----
.. versionadded::1.7.0
"""
try:
x, y = np.array((x, y), copy=0)
    except Exception:
raise ValueError('x, y are incompatible')
c = hermeval(x, c)
c = hermeval(y, c, tensor=False)
return c
def hermegrid2d(x, y, c):
"""
Evaluate a 2-D HermiteE series on the Cartesian product of x and y.
This function returns the values:
.. math:: p(a,b) = \sum_{i,j} c_{i,j} * H_i(a) * H_j(b)
where the points `(a, b)` consist of all pairs formed by taking
`a` from `x` and `b` from `y`. The resulting points form a grid with
`x` in the first dimension and `y` in the second.
The parameters `x` and `y` are converted to arrays only if they are
tuples or a lists, otherwise they are treated as a scalars. In either
case, either `x` and `y` or their elements must support multiplication
and addition both with themselves and with the elements of `c`.
If `c` has fewer than two dimensions, ones are implicitly appended to
its shape to make it 2-D. The shape of the result will be c.shape[2:] +
x.shape.
Parameters
----------
x, y : array_like, compatible objects
The two dimensional series is evaluated at the points in the
Cartesian product of `x` and `y`. If `x` or `y` is a list or
tuple, it is first converted to an ndarray, otherwise it is left
unchanged and, if it isn't an ndarray, it is treated as a scalar.
c : array_like
Array of coefficients ordered so that the coefficients for terms of
degree i,j are contained in ``c[i,j]``. If `c` has dimension
greater than two the remaining indices enumerate multiple sets of
coefficients.
Returns
-------
values : ndarray, compatible object
The values of the two dimensional polynomial at points in the Cartesian
product of `x` and `y`.
See Also
--------
hermeval, hermeval2d, hermeval3d, hermegrid3d
Notes
-----
.. versionadded::1.7.0
"""
c = hermeval(x, c)
c = hermeval(y, c)
return c
def hermeval3d(x, y, z, c):
"""
Evaluate a 3-D Hermite_e series at points (x, y, z).
This function returns the values:
.. math:: p(x,y,z) = \\sum_{i,j,k} c_{i,j,k} * He_i(x) * He_j(y) * He_k(z)
The parameters `x`, `y`, and `z` are converted to arrays only if
they are tuples or a lists, otherwise they are treated as a scalars and
they must have the same shape after conversion. In either case, either
`x`, `y`, and `z` or their elements must support multiplication and
addition both with themselves and with the elements of `c`.
If `c` has fewer than 3 dimensions, ones are implicitly appended to its
shape to make it 3-D. The shape of the result will be c.shape[3:] +
x.shape.
Parameters
----------
x, y, z : array_like, compatible object
The three dimensional series is evaluated at the points
`(x, y, z)`, where `x`, `y`, and `z` must have the same shape. If
any of `x`, `y`, or `z` is a list or tuple, it is first converted
to an ndarray, otherwise it is left unchanged and if it isn't an
ndarray it is treated as a scalar.
c : array_like
Array of coefficients ordered so that the coefficient of the term of
multi-degree i,j,k is contained in ``c[i,j,k]``. If `c` has dimension
greater than 3 the remaining indices enumerate multiple sets of
coefficients.
Returns
-------
values : ndarray, compatible object
The values of the multidimensional polynomial on points formed with
triples of corresponding values from `x`, `y`, and `z`.
See Also
--------
hermeval, hermeval2d, hermegrid2d, hermegrid3d
Notes
-----
.. versionadded::1.7.0
"""
try:
x, y, z = np.array((x, y, z), copy=0)
    except Exception:
raise ValueError('x, y, z are incompatible')
c = hermeval(x, c)
c = hermeval(y, c, tensor=False)
c = hermeval(z, c, tensor=False)
return c
def hermegrid3d(x, y, z, c):
"""
Evaluate a 3-D HermiteE series on the Cartesian product of x, y, and z.
This function returns the values:
.. math:: p(a,b,c) = \\sum_{i,j,k} c_{i,j,k} * He_i(a) * He_j(b) * He_k(c)
where the points `(a, b, c)` consist of all triples formed by taking
`a` from `x`, `b` from `y`, and `c` from `z`. The resulting points form
a grid with `x` in the first dimension, `y` in the second, and `z` in
the third.
The parameters `x`, `y`, and `z` are converted to arrays only if they
are tuples or a lists, otherwise they are treated as a scalars. In
either case, either `x`, `y`, and `z` or their elements must support
multiplication and addition both with themselves and with the elements
of `c`.
If `c` has fewer than three dimensions, ones are implicitly appended to
its shape to make it 3-D. The shape of the result will be c.shape[3:] +
x.shape + y.shape + z.shape.
Parameters
----------
x, y, z : array_like, compatible objects
The three dimensional series is evaluated at the points in the
Cartesian product of `x`, `y`, and `z`. If `x`,`y`, or `z` is a
list or tuple, it is first converted to an ndarray, otherwise it is
left unchanged and, if it isn't an ndarray, it is treated as a
scalar.
c : array_like
Array of coefficients ordered so that the coefficients for terms of
degree i,j are contained in ``c[i,j]``. If `c` has dimension
greater than two the remaining indices enumerate multiple sets of
coefficients.
Returns
-------
values : ndarray, compatible object
The values of the two dimensional polynomial at points in the Cartesian
product of `x` and `y`.
See Also
--------
hermeval, hermeval2d, hermegrid2d, hermeval3d
Notes
-----
.. versionadded::1.7.0
"""
c = hermeval(x, c)
c = hermeval(y, c)
c = hermeval(z, c)
return c
def hermevander(x, deg):
"""Pseudo-Vandermonde matrix of given degree.
Returns the pseudo-Vandermonde matrix of degree `deg` and sample points
`x`. The pseudo-Vandermonde matrix is defined by
.. math:: V[..., i] = He_i(x),
where `0 <= i <= deg`. The leading indices of `V` index the elements of
`x` and the last index is the degree of the HermiteE polynomial.
If `c` is a 1-D array of coefficients of length `n + 1` and `V` is the
array ``V = hermevander(x, n)``, then ``np.dot(V, c)`` and
``hermeval(x, c)`` are the same up to roundoff. This equivalence is
useful both for least squares fitting and for the evaluation of a large
number of HermiteE series of the same degree and sample points.
Parameters
----------
x : array_like
Array of points. The dtype is converted to float64 or complex128
depending on whether any of the elements are complex. If `x` is
scalar it is converted to a 1-D array.
deg : int
Degree of the resulting matrix.
Returns
-------
vander : ndarray
The pseudo-Vandermonde matrix. The shape of the returned matrix is
``x.shape + (deg + 1,)``, where The last index is the degree of the
corresponding HermiteE polynomial. The dtype will be the same as
the converted `x`.
Examples
--------
>>> from numpy.polynomial.hermite_e import hermevander
>>> x = np.array([-1, 0, 1])
>>> hermevander(x, 3)
array([[ 1., -1., 0., 2.],
[ 1., 0., -1., -0.],
[ 1., 1., 0., -2.]])
"""
ideg = int(deg)
if ideg != deg:
raise ValueError("deg must be integer")
if ideg < 0:
raise ValueError("deg must be non-negative")
x = np.array(x, copy=0, ndmin=1) + 0.0
dims = (ideg + 1,) + x.shape
dtyp = x.dtype
v = np.empty(dims, dtype=dtyp)
v[0] = x*0 + 1
if ideg > 0:
v[1] = x
for i in range(2, ideg + 1):
v[i] = (v[i-1]*x - v[i-2]*(i - 1))
return np.rollaxis(v, 0, v.ndim)
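# Equivalence sketch (illustrative): as the docstring states, the dot
# product of the pseudo-Vandermonde matrix with a coefficient vector
# matches direct evaluation up to roundoff:
#
#   x = np.linspace(-1., 1., 5)
#   c = np.array([1., 2., 3.])
#   V = hermevander(x, 2)
#   np.allclose(np.dot(V, c), hermeval(x, c))   # True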
def hermevander2d(x, y, deg):
"""Pseudo-Vandermonde matrix of given degrees.
Returns the pseudo-Vandermonde matrix of degrees `deg` and sample
points `(x, y)`. The pseudo-Vandermonde matrix is defined by
.. math:: V[..., deg[1]*i + j] = He_i(x) * He_j(y),
where `0 <= i <= deg[0]` and `0 <= j <= deg[1]`. The leading indices of
`V` index the points `(x, y)` and the last index encodes the degrees of
the HermiteE polynomials.
If ``V = hermevander2d(x, y, [xdeg, ydeg])``, then the columns of `V`
correspond to the elements of a 2-D coefficient array `c` of shape
(xdeg + 1, ydeg + 1) in the order
.. math:: c_{00}, c_{01}, c_{02} ... , c_{10}, c_{11}, c_{12} ...
and ``np.dot(V, c.flat)`` and ``hermeval2d(x, y, c)`` will be the same
up to roundoff. This equivalence is useful both for least squares
fitting and for the evaluation of a large number of 2-D HermiteE
series of the same degrees and sample points.
Parameters
----------
x, y : array_like
Arrays of point coordinates, all of the same shape. The dtypes
will be converted to either float64 or complex128 depending on
whether any of the elements are complex. Scalars are converted to
1-D arrays.
deg : list of ints
List of maximum degrees of the form [x_deg, y_deg].
Returns
-------
vander2d : ndarray
The shape of the returned matrix is ``x.shape + (order,)``, where
        :math:`order = (deg[0]+1)*(deg[1]+1)`. The dtype will be the same
as the converted `x` and `y`.
See Also
--------
    hermevander, hermevander3d, hermeval2d, hermeval3d
Notes
-----
.. versionadded::1.7.0
"""
ideg = [int(d) for d in deg]
is_valid = [id == d and id >= 0 for id, d in zip(ideg, deg)]
if is_valid != [1, 1]:
raise ValueError("degrees must be non-negative integers")
degx, degy = ideg
x, y = np.array((x, y), copy=0) + 0.0
vx = hermevander(x, degx)
vy = hermevander(y, degy)
v = vx[..., None]*vy[..., None,:]
return v.reshape(v.shape[:-2] + (-1,))
def hermevander3d(x, y, z, deg):
"""Pseudo-Vandermonde matrix of given degrees.
Returns the pseudo-Vandermonde matrix of degrees `deg` and sample
points `(x, y, z)`. If `l, m, n` are the given degrees in `x, y, z`,
    then the pseudo-Vandermonde matrix is defined by
.. math:: V[..., (m+1)(n+1)i + (n+1)j + k] = He_i(x)*He_j(y)*He_k(z),
    where `0 <= i <= l`, `0 <= j <= m`, and `0 <= k <= n`. The leading
indices of `V` index the points `(x, y, z)` and the last index encodes
the degrees of the HermiteE polynomials.
If ``V = hermevander3d(x, y, z, [xdeg, ydeg, zdeg])``, then the columns
of `V` correspond to the elements of a 3-D coefficient array `c` of
shape (xdeg + 1, ydeg + 1, zdeg + 1) in the order
.. math:: c_{000}, c_{001}, c_{002},... , c_{010}, c_{011}, c_{012},...
and ``np.dot(V, c.flat)`` and ``hermeval3d(x, y, z, c)`` will be the
same up to roundoff. This equivalence is useful both for least squares
fitting and for the evaluation of a large number of 3-D HermiteE
series of the same degrees and sample points.
Parameters
----------
x, y, z : array_like
Arrays of point coordinates, all of the same shape. The dtypes will
be converted to either float64 or complex128 depending on whether
any of the elements are complex. Scalars are converted to 1-D
arrays.
deg : list of ints
List of maximum degrees of the form [x_deg, y_deg, z_deg].
Returns
-------
vander3d : ndarray
The shape of the returned matrix is ``x.shape + (order,)``, where
        :math:`order = (deg[0]+1)*(deg[1]+1)*(deg[2]+1)`. The dtype will
be the same as the converted `x`, `y`, and `z`.
See Also
--------
    hermevander, hermevander2d, hermeval2d, hermeval3d
Notes
-----
.. versionadded::1.7.0
"""
ideg = [int(d) for d in deg]
is_valid = [id == d and id >= 0 for id, d in zip(ideg, deg)]
if is_valid != [1, 1, 1]:
raise ValueError("degrees must be non-negative integers")
degx, degy, degz = ideg
x, y, z = np.array((x, y, z), copy=0) + 0.0
vx = hermevander(x, degx)
vy = hermevander(y, degy)
vz = hermevander(z, degz)
v = vx[..., None, None]*vy[..., None,:, None]*vz[..., None, None,:]
return v.reshape(v.shape[:-3] + (-1,))
def hermefit(x, y, deg, rcond=None, full=False, w=None):
"""
Least squares fit of Hermite series to data.
Return the coefficients of a HermiteE series of degree `deg` that is
the least squares fit to the data values `y` given at points `x`. If
`y` is 1-D the returned coefficients will also be 1-D. If `y` is 2-D
multiple fits are done, one for each column of `y`, and the resulting
coefficients are stored in the corresponding columns of a 2-D return.
The fitted polynomial(s) are in the form
.. math:: p(x) = c_0 + c_1 * He_1(x) + ... + c_n * He_n(x),
where `n` is `deg`.
Parameters
----------
x : array_like, shape (M,)
x-coordinates of the M sample points ``(x[i], y[i])``.
y : array_like, shape (M,) or (M, K)
y-coordinates of the sample points. Several data sets of sample
points sharing the same x-coordinates can be fitted at once by
passing in a 2D-array that contains one dataset per column.
deg : int or 1-D array_like
Degree(s) of the fitting polynomials. If `deg` is a single integer
all terms up to and including the `deg`'th term are included in the
fit. For Numpy versions >= 1.11 a list of integers specifying the
degrees of the terms to include may be used instead.
rcond : float, optional
Relative condition number of the fit. Singular values smaller than
this relative to the largest singular value will be ignored. The
default value is len(x)*eps, where eps is the relative precision of
the float type, about 2e-16 in most cases.
full : bool, optional
Switch determining nature of return value. When it is False (the
default) just the coefficients are returned, when True diagnostic
information from the singular value decomposition is also returned.
w : array_like, shape (`M`,), optional
Weights. If not None, the contribution of each point
``(x[i],y[i])`` to the fit is weighted by `w[i]`. Ideally the
weights are chosen so that the errors of the products ``w[i]*y[i]``
all have the same variance. The default value is None.
Returns
-------
    coef : ndarray, shape (deg + 1,) or (deg + 1, K)
Hermite coefficients ordered from low to high. If `y` was 2-D,
the coefficients for the data in column k of `y` are in column
`k`.
[residuals, rank, singular_values, rcond] : list
These values are only returned if `full` = True
resid -- sum of squared residuals of the least squares fit
rank -- the numerical rank of the scaled Vandermonde matrix
sv -- singular values of the scaled Vandermonde matrix
rcond -- value of `rcond`.
For more details, see `linalg.lstsq`.
Warns
-----
RankWarning
The rank of the coefficient matrix in the least-squares fit is
deficient. The warning is only raised if `full` = False. The
warnings can be turned off by
>>> import warnings
>>> warnings.simplefilter('ignore', RankWarning)
See Also
--------
chebfit, legfit, polyfit, hermfit, polyfit
hermeval : Evaluates a Hermite series.
hermevander : pseudo Vandermonde matrix of Hermite series.
hermeweight : HermiteE weight function.
linalg.lstsq : Computes a least-squares fit from the matrix.
scipy.interpolate.UnivariateSpline : Computes spline fits.
Notes
-----
The solution is the coefficients of the HermiteE series `p` that
minimizes the sum of the weighted squared errors
.. math:: E = \\sum_j w_j^2 * |y_j - p(x_j)|^2,
where the :math:`w_j` are the weights. This problem is solved by
setting up the (typically) overdetermined matrix equation
.. math:: V(x) * c = w * y,
where `V` is the pseudo Vandermonde matrix of `x`, the elements of `c`
are the coefficients to be solved for, and the elements of `y` are the
observed values. This equation is then solved using the singular value
decomposition of `V`.
If some of the singular values of `V` are so small that they are
neglected, then a `RankWarning` will be issued. This means that the
coefficient values may be poorly determined. Using a lower order fit
will usually get rid of the warning. The `rcond` parameter can also be
set to a value smaller than its default, but the resulting fit may be
spurious and have large contributions from roundoff error.
Fits using HermiteE series are probably most useful when the data can
be approximated by ``sqrt(w(x)) * p(x)``, where `w(x)` is the HermiteE
weight. In that case the weight ``sqrt(w(x[i])`` should be used
together with data values ``y[i]/sqrt(w(x[i])``. The weight function is
available as `hermeweight`.
References
----------
.. [1] Wikipedia, "Curve fitting",
http://en.wikipedia.org/wiki/Curve_fitting
Examples
--------
    >>> from numpy.polynomial.hermite_e import hermefit, hermeval
>>> x = np.linspace(-10, 10)
>>> err = np.random.randn(len(x))/10
>>> y = hermeval(x, [1, 2, 3]) + err
>>> hermefit(x, y, 2)
array([ 1.01690445, 1.99951418, 2.99948696])
"""
x = np.asarray(x) + 0.0
y = np.asarray(y) + 0.0
deg = np.asarray(deg)
# check arguments.
if deg.ndim > 1 or deg.dtype.kind not in 'iu' or deg.size == 0:
raise TypeError("deg must be an int or non-empty 1-D array of int")
if deg.min() < 0:
raise ValueError("expected deg >= 0")
if x.ndim != 1:
raise TypeError("expected 1D vector for x")
if x.size == 0:
raise TypeError("expected non-empty vector for x")
if y.ndim < 1 or y.ndim > 2:
raise TypeError("expected 1D or 2D array for y")
if len(x) != len(y):
raise TypeError("expected x and y to have same length")
if deg.ndim == 0:
lmax = deg
order = lmax + 1
van = hermevander(x, lmax)
else:
deg = np.sort(deg)
lmax = deg[-1]
order = len(deg)
van = hermevander(x, lmax)[:, deg]
# set up the least squares matrices in transposed form
lhs = van.T
rhs = y.T
if w is not None:
w = np.asarray(w) + 0.0
if w.ndim != 1:
raise TypeError("expected 1D vector for w")
if len(x) != len(w):
raise TypeError("expected x and w to have same length")
# apply weights. Don't use inplace operations as they
# can cause problems with NA.
lhs = lhs * w
rhs = rhs * w
# set rcond
if rcond is None:
rcond = len(x)*np.finfo(x.dtype).eps
# Determine the norms of the design matrix columns.
if issubclass(lhs.dtype.type, np.complexfloating):
scl = np.sqrt((np.square(lhs.real) + np.square(lhs.imag)).sum(1))
else:
scl = np.sqrt(np.square(lhs).sum(1))
scl[scl == 0] = 1
# Solve the least squares problem.
c, resids, rank, s = la.lstsq(lhs.T/scl, rhs.T, rcond)
c = (c.T/scl).T
# Expand c to include non-fitted coefficients which are set to zero
if deg.ndim > 0:
if c.ndim == 2:
cc = np.zeros((lmax+1, c.shape[1]), dtype=c.dtype)
else:
cc = np.zeros(lmax+1, dtype=c.dtype)
cc[deg] = c
c = cc
# warn on rank reduction
if rank != order and not full:
msg = "The fit may be poorly conditioned"
warnings.warn(msg, pu.RankWarning)
if full:
return c, [resids, rank, s, rcond]
else:
return c
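# Weighted-fit sketch (illustrative; follows the sqrt(w(x)) recipe from the
# hermefit Notes section above, with made-up sample data):
#
#   x = np.linspace(-4., 4., 81)
#   sw = np.sqrt(hermeweight(x))           # sqrt of exp(-x**2/2)
#   y = sw * hermeval(x, [1., 2., 3.])     # data ~ sqrt(w(x)) * p(x)
#   coef = hermefit(x, y/sw, 2, w=sw)      # recovers ~[1., 2., 3.]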
def hermecompanion(c):
"""
Return the scaled companion matrix of c.
The basis polynomials are scaled so that the companion matrix is
symmetric when `c` is an HermiteE basis polynomial. This provides
better eigenvalue estimates than the unscaled case and for basis
polynomials the eigenvalues are guaranteed to be real if
`numpy.linalg.eigvalsh` is used to obtain them.
Parameters
----------
c : array_like
1-D array of HermiteE series coefficients ordered from low to high
degree.
Returns
-------
mat : ndarray
Scaled companion matrix of dimensions (deg, deg).
Notes
-----
.. versionadded::1.7.0
"""
# c is a trimmed copy
[c] = pu.as_series([c])
if len(c) < 2:
raise ValueError('Series must have maximum degree of at least 1.')
if len(c) == 2:
return np.array([[-c[0]/c[1]]])
n = len(c) - 1
mat = np.zeros((n, n), dtype=c.dtype)
scl = np.hstack((1., 1./np.sqrt(np.arange(n - 1, 0, -1))))
scl = np.multiply.accumulate(scl)[::-1]
top = mat.reshape(-1)[1::n+1]
bot = mat.reshape(-1)[n::n+1]
top[...] = np.sqrt(np.arange(1, n))
bot[...] = top
mat[:, -1] -= scl*c[:-1]/c[-1]
return mat
def hermeroots(c):
"""
Compute the roots of a HermiteE series.
Return the roots (a.k.a. "zeros") of the polynomial
.. math:: p(x) = \\sum_i c[i] * He_i(x).
Parameters
----------
c : 1-D array_like
1-D array of coefficients.
Returns
-------
out : ndarray
Array of the roots of the series. If all the roots are real,
then `out` is also real, otherwise it is complex.
See Also
--------
polyroots, legroots, lagroots, hermroots, chebroots
Notes
-----
The root estimates are obtained as the eigenvalues of the companion
matrix, Roots far from the origin of the complex plane may have large
errors due to the numerical instability of the series for such
values. Roots with multiplicity greater than 1 will also show larger
errors as the value of the series near such points is relatively
insensitive to errors in the roots. Isolated roots near the origin can
be improved by a few iterations of Newton's method.
The HermiteE series basis polynomials aren't powers of `x` so the
results of this function may seem unintuitive.
Examples
--------
>>> from numpy.polynomial.hermite_e import hermeroots, hermefromroots
>>> coef = hermefromroots([-1, 0, 1])
>>> coef
array([ 0., 2., 0., 1.])
>>> hermeroots(coef)
array([-1., 0., 1.])
"""
# c is a trimmed copy
[c] = pu.as_series([c])
if len(c) <= 1:
return np.array([], dtype=c.dtype)
if len(c) == 2:
return np.array([-c[0]/c[1]])
m = hermecompanion(c)
r = la.eigvals(m)
r.sort()
return r
def _normed_hermite_e_n(x, n):
"""
Evaluate a normalized HermiteE polynomial.
Compute the value of the normalized HermiteE polynomial of degree ``n``
at the points ``x``.
Parameters
----------
x : ndarray of double.
Points at which to evaluate the function
n : int
Degree of the normalized HermiteE function to be evaluated.
Returns
-------
values : ndarray
The shape of the return value is described above.
Notes
-----
.. versionadded:: 1.10.0
This function is needed for finding the Gauss points and integration
weights for high degrees. The values of the standard HermiteE functions
overflow when n >= 207.
"""
if n == 0:
return np.ones(x.shape)/np.sqrt(np.sqrt(2*np.pi))
c0 = 0.
c1 = 1./np.sqrt(np.sqrt(2*np.pi))
nd = float(n)
for i in range(n - 1):
tmp = c0
c0 = -c1*np.sqrt((nd - 1.)/nd)
c1 = tmp + c1*x*np.sqrt(1./nd)
nd = nd - 1.0
return c0 + c1*x
def hermegauss(deg):
"""
Gauss-HermiteE quadrature.
Computes the sample points and weights for Gauss-HermiteE quadrature.
These sample points and weights will correctly integrate polynomials of
degree :math:`2*deg - 1` or less over the interval :math:`[-\inf, \inf]`
with the weight function :math:`f(x) = \exp(-x^2/2)`.
Parameters
----------
deg : int
Number of sample points and weights. It must be >= 1.
Returns
-------
x : ndarray
1-D ndarray containing the sample points.
y : ndarray
1-D ndarray containing the weights.
Notes
-----
.. versionadded::1.7.0
The results have only been tested up to degree 100, higher degrees may
be problematic. The weights are determined by using the fact that
.. math:: w_k = c / (He'_n(x_k) * He_{n-1}(x_k))
where :math:`c` is a constant independent of :math:`k` and :math:`x_k`
is the k'th root of :math:`He_n`, and then scaling the results to get
the right value when integrating 1.
"""
ideg = int(deg)
if ideg != deg or ideg < 1:
raise ValueError("deg must be a non-negative integer")
# first approximation of roots. We use the fact that the companion
# matrix is symmetric in this case in order to obtain better zeros.
c = np.array([0]*deg + [1])
m = hermecompanion(c)
x = la.eigvalsh(m)
# improve roots by one application of Newton
dy = _normed_hermite_e_n(x, ideg)
df = _normed_hermite_e_n(x, ideg - 1) * np.sqrt(ideg)
x -= dy/df
# compute the weights. We scale the factor to avoid possible numerical
# overflow.
fm = _normed_hermite_e_n(x, ideg - 1)
fm /= np.abs(fm).max()
w = 1/(fm * fm)
# for Hermite_e we can also symmetrize
w = (w + w[::-1])/2
x = (x - x[::-1])/2
# scale w to get the right value
w *= np.sqrt(2*np.pi) / w.sum()
return x, w
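# Quadrature sketch (illustrative): against the weight exp(-x**2/2), the
# integral of x**2 over (-inf, inf) is sqrt(2*pi), and a 3-point rule,
# exact through degree 5, reproduces it:
#
#   x, w = hermegauss(3)
#   (w * x**2).sum()          # ~ 2.5066... == np.sqrt(2*np.pi)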
def hermeweight(x):
"""Weight function of the Hermite_e polynomials.
The weight function is :math:`\exp(-x^2/2)` and the interval of
integration is :math:`[-\inf, \inf]`. the HermiteE polynomials are
orthogonal, but not normalized, with respect to this weight function.
Parameters
----------
x : array_like
Values at which the weight function will be computed.
Returns
-------
w : ndarray
The weight function at `x`.
Notes
-----
.. versionadded::1.7.0
"""
w = np.exp(-.5*x**2)
return w
#
# HermiteE series class
#
class HermiteE(ABCPolyBase):
"""An HermiteE series class.
The HermiteE class provides the standard Python numerical methods
'+', '-', '*', '//', '%', 'divmod', '**', and '()' as well as the
attributes and methods listed in the `ABCPolyBase` documentation.
Parameters
----------
coef : array_like
HermiteE coefficients in order of increasing degree, i.e,
``(1, 2, 3)`` gives ``1*He_0(x) + 2*He_1(X) + 3*He_2(x)``.
domain : (2,) array_like, optional
Domain to use. The interval ``[domain[0], domain[1]]`` is mapped
to the interval ``[window[0], window[1]]`` by shifting and scaling.
The default value is [-1, 1].
window : (2,) array_like, optional
Window, see `domain` for its use. The default value is [-1, 1].
.. versionadded:: 1.6.0
"""
# Virtual Functions
_add = staticmethod(hermeadd)
_sub = staticmethod(hermesub)
_mul = staticmethod(hermemul)
_div = staticmethod(hermediv)
_pow = staticmethod(hermepow)
_val = staticmethod(hermeval)
_int = staticmethod(hermeint)
_der = staticmethod(hermeder)
_fit = staticmethod(hermefit)
_line = staticmethod(hermeline)
_roots = staticmethod(hermeroots)
_fromroots = staticmethod(hermefromroots)
# Virtual properties
nickname = 'herme'
domain = np.array(hermedomain)
window = np.array(hermedomain)
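
# Class-based usage sketch (illustrative; these operations come from the
# ABCPolyBase interface listed in the class docstring):
#
#   p = HermiteE([1, 2, 3])            # 1*He_0 + 2*He_1 + 3*He_2
#   q = (p * p) % HermiteE([0, 1])     # arithmetic stays in the He basis
#   p.deriv(), p.integ()               # calculus via hermeder/hermeint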
# === brianhelba/pylibtiff :: libtiff/tests/test_simple.py (license: bsd-3-clause) ===
import os
from tempfile import mktemp
from numpy import *
from libtiff import TIFF
def test_write_read():
for itype in [uint8, uint16, uint32, uint64,
int8, int16, int32, int64,
float32, float64,
complex64, complex128]:
image = array([[1, 2, 3], [4, 5, 6]], itype)
fn = mktemp('.tif')
tif = TIFF.open(fn, 'w')
tif.write_image(image)
tif.close()
tif = TIFF.open(fn, 'r')
image2 = tif.read_image()
tif.close()
os.remove(fn)
assert image.dtype == image2.dtype
assert (image == image2).all()
def test_slicing():
shape = (16, 16)
image = random.randint(255, size=shape)
for i in range(shape[0]):
for j in range(shape[1]):
image1 = image[:i + 1, :j + 1]
fn = mktemp('.tif')
tif = TIFF.open(fn, 'w')
tif.write_image(image1)
tif.close()
tif = TIFF.open(fn, 'r')
image2 = tif.read_image()
tif.close()
assert (image1 == image2).all(), repr((i, j))
os.remove(fn)
# === jiankers/weevely3 :: core/vectorlist.py (license: gpl-3.0) ===
"""
The module `core.vectorlist` defines a `VectorList` object, normally used
to store the module vectors.
Module class executes `_register_vectors()` at init to initialize the `VectorList`
object as `self.vectors` module attribute.
The methods exposed by VectorList can be used to get the result of a
given vector execution with `get_result()`, get all the results of a bunch of
vectors with `get_results()`, or get the result of the first vector that
responds the way we want with `find_first_result()`.
"""
from core.vectors import Os
from mako.template import Template
from core.weexceptions import DevException
from core.loggers import log, dlog
from core import modules
import utils
from core import messages
class VectorList(list):
def __init__(self, session, module_name):
self.session = session
self.module_name = module_name
list.__init__(self)
def find_first_result(self, names = [], format_args = {}, condition = None, store_result = False, store_name = ''):
""" Execute all the vectors and return the first result matching the given condition.
        Return the name and the result of the first vector whose execution response satisfies
the given condition.
With unspecified names, execute all the vectors. Optionally store results.
        Exceptions raised while checking the condition function are caught and logged.
Args:
names (list of str): The list of names of vectors to execute.
format_args (dict): The arguments dictionary used to format the vectors with.
condition (function): The function or lambda to check certain conditions on result.
Must returns boolean.
store_result (bool): Store as result.
store_name (str): Store the found vector name in the specified argument.
Returns:
Tuple. Contains the vector name and execution result in the
`( vector_name, result )` form.
"""
if not callable(condition):
raise DevException(messages.vectors.wrong_condition_type)
if not isinstance(store_name, str):
raise DevException(messages.vectors.wrong_store_name_type)
for vector in self:
# Skip with wrong vectors
if not self._os_match(vector.target): continue
# Clean names filter from empty objects
names = [ n for n in names if n ]
# Skip if a names filter was passed and the current vector does not match it
if names and not any(n in vector.name for n in names): continue
# Add current vector name
format_args['current_vector'] = vector.name
# Run
result = vector.run(format_args)
# See if condition is verified
try:
condition_result = condition(result)
except Exception as e:
import traceback; log.info(traceback.format_exc())
log.debug(messages.vectorlist.vector_s_triggers_an_exc % vector.name)
condition_result = False
# Eventually store result or vector name
if condition_result:
if store_result:
self.session[self.module_name]['results'][vector.name] = result
if store_name:
self.session[self.module_name]['stored_args'][store_name] = vector.name
return vector.name, result
return None, None
def get_result(self, name, format_args = {}, store_result = False):
"""Execute one vector and return the result.
Run the vector with the specified name. Optionally store results.
Args:
name (str): The name of vector to execute.
format_args (dict): The arguments dictionary used to format the vectors with.
store_result (bool): Store result in session.
Returns:
Object. Contains the vector execution result.
"""
vector = self.get_by_name(name)
if vector and self._os_match(vector.target):
# Add current vector name
format_args['current_vector'] = vector.name
result = vector.run(format_args)
if store_result:
self.session[self.module_name]['results'][name] = result
return result
def get_results(self, names = [], format_args = {}, results_to_store = [ ]):
"""Execute all the vectors and return the results.
With unspecified names, execute all the vectors. Optionally store results.
Returns a dictionary with results.
Args:
names (list of str): The list of names of vectors to execute.
format_args (dict): The arguments dictionary used to format the vectors with.
results_to_store (list of str): The list of names of the vectors which
store the execution result.
Returns:
Dictionary. Contains all the vector results in the
`{ vector_name : result }` form.
"""
response = {}
for vector in self:
if not self._os_match(vector.target): continue
if names and not any(x in vector.name for x in names): continue
# Add current vector name
format_args['current_vector'] = vector.name
response[vector.name] = vector.run(format_args)
if not any(x in vector.name for x in results_to_store): continue
self.session[self.module_name]['results'][vector.name] = response[vector.name]
return response
def _os_match(self, os):
"""Check if vector os is compatible with the remote os."""
os_string = self.session['system_info']['results'].get('os')
# If os_string is not set, just return True and continue
if not os_string: return True
os_current = Os.WIN if os_string.lower().startswith('win') else Os.NIX
return os in (os_current, Os.ANY)
def get_by_name(self, name):
"""Get the vector object by name.
Args:
name (str): the name of the requested vector.
Returns:
Vector object.
"""
return next((v for v in self if v.name == name), None)
def get_names(self):
"""Get the vectors names.
Returns:
List of strings. Contain vectors names.
"""
return [ v.name for v in self ]
| gpl-3.0 | -6,545,198,293,626,500,000 | 7,967,500,505,789,375,000 | 31.139303 | 119 | 0.613932 | false |
oskar456/turrisclock | fakegpio.py | 1 | 1282 | #!/usr/bin/env python
# vim: expandtab shiftwidth=4 tabstop=8 softtabstop=4 smartindent cinwords=if,elif,else,for,while,try,except,finally,def,class,with
from __future__ import print_function
class GPIO:
""" Class representing one fake GPIO signal for debugging purposes """
def __init__(self, name, direction="in"):
"""
@param name GPIO number (224, 225, ...) as a string
@param direction string, either "in" or "out"
"""
self.name = str(name)
print("GPIO {} exported".format(name))
self.reset()
self.setDirection(direction)
def __del__(self):
""" Make sure direction is set to in to protect the SoC """
self.setDirection("in")
def setDirection(self, direction):
"""Sets pin direction"""
self.direction = direction
print("GPIO {} direction set to {}".format(self.name, direction))
def get(self):
"""Return current GPIO value"""
return False
def set(self, value=True):
"""Sets GPIO to value"""
print("GPIO {} set to {}".format(self.name, '1' if value else '0'))
def reset(self):
"""Sets GPIO to value 0"""
self.set(False)
def __repr__(self):
return "GPIO({}, {})".format(self.name, self.direction)
| mit | -3,254,289,439,055,237,000 | 7,747,664,954,380,009,000 | 29.52381 | 131 | 0.592824 | false |
mhrivnak/pulp | bindings/pulp/bindings/consumer_groups.py | 3 | 7364 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
from pulp.bindings.search import SearchAPI
from pulp.bindings.base import PulpAPI
class ConsumerGroupAPI(PulpAPI):
"""
Connection class to access consumer group specific calls
"""
PATH = 'v2/consumer_groups/'
def consumer_groups(self):
"""
retrieve all consumer groups
:return: all consumer groups
:rtype: list
"""
return self.server.GET(self.PATH)
def create(self, consumer_group_id, display_name, description, notes):
"""
Create a consumer group.
:param consumer_group_id: unique primary key
:type consumer_group_id: basestring
:param display_name: Human-readable name
:type display_name: basestring
:param description: text description of the consumer group
:type description: basestring
:param notes: key-value pairs to programmatically tag the consumer
:type notes: dict
:return: Response object
:rtype: pulp.bindings.responses.Response
"""
data = {'id': consumer_group_id,
'display_name': display_name,
'description': description,
'notes': notes,}
return self.server.POST(self.PATH, data)
def consumer_group(self, consumer_group_id):
"""
Retrieve a single consumer group
:param consumer_group_id: primary key for a consumer group
:type consumer_group_id: basestring
:return: Response object
:rtype: pulp.bindings.responses.Response
"""
path = self.PATH + ('%s/' % consumer_group_id)
return self.server.GET(path)
def delete(self, consumer_group_id):
"""
Delete a single consumer group
:param consumer_group_id: primary key for a consumer group
:type consumer_group_id: basestring
:return: Response object
:rtype: pulp.bindings.responses.Response
"""
path = self.PATH + '%s/' % consumer_group_id
return self.server.DELETE(path)
def update(self, consumer_group_id, delta):
"""
Update a consumer group
:param consumer_group_id: primary key for a consumer group
:type consumer_group_id: basestring
:param delta: map of new values with attribute names as keys.
:type delta: dict
:return: Response object
:rtype: pulp.bindings.responses.Response
"""
path = self.PATH + '%s/' % consumer_group_id
return self.server.PUT(path, delta)
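# A minimal usage sketch (hypothetical ids; `connection` stands for the
# authenticated Pulp server object that PulpAPI subclasses are built around):
#
#   groups = ConsumerGroupAPI(connection)
#   groups.create('web-servers', 'Web servers', 'front-end consumers', {})
#   groups.update('web-servers', {'description': 'front-end boxes'})
#   groups.delete('web-servers')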
class ConsumerGroupSearchAPI(SearchAPI):
"""
Consumer Group searching.
"""
PATH = 'v2/consumer_groups/search/'
class ConsumerGroupActionAPI(SearchAPI):
"""
Consumer Group Actions.
"""
PATH = 'v2/consumer_groups/%s/actions/'
def associate(self, consumer_group_id, **kwargs):
"""
Associate a set of consumers with a consumer group.
:param consumer_group_id: primary key for a consumer group
:type consumer_group_id: basestring
:param kwargs: name based parameters that match the values accepted by
pulp.server.db.model.criteria.Criteria.__init__
:type kwargs: dict
:return: Response body
:rtype: basestring
"""
path = self.PATH % consumer_group_id + 'associate/'
filters = self.compose_filters(**kwargs)
if filters:
kwargs['filters'] = filters
self._strip_criteria_kwargs(kwargs)
response = self.server.POST(path, {'criteria':kwargs})
return response.response_body
def unassociate(self, consumer_group_id, **kwargs):
"""
Unassociate a set of consumers with a consumer group.
:param consumer_group_id: primary key for a consumer group
:type consumer_group_id: basestring
:param kwargs: name based parameters that match the values accepted by
pulp.server.db.model.criteria.Criteria.__init__
:type kwargs: dict
:return: Response body
:rtype: basestring
"""
path = self.PATH % consumer_group_id + 'unassociate/'
filters = self.compose_filters(**kwargs)
if filters:
kwargs['filters'] = filters
self._strip_criteria_kwargs(kwargs)
response = self.server.POST(path, {'criteria':kwargs})
return response.response_body
class ConsumerGroupBindAPI(PulpAPI):
"""
Consumer Group bind operations
"""
PATH = 'v2/consumer_groups/%s/bindings/'
def bind(self, consumer_group_id, repo_id, distributor_id):
"""
Bind a consumer group to a distributor associated with a repository.
Each consumer in the consumer group will be bound.
:param consumer_group_id: primary key for a consumer group
:type consumer_group_id: basestring
:param repo_id: repository id
:type repo_id: basestring
:param distributor_id: distributor id
:type distributor_id: basestring
"""
path = self.PATH % (consumer_group_id)
data = {'repo_id' : repo_id, 'distributor_id' : distributor_id}
response = self.server.POST(path, data)
return response
def unbind(self, consumer_group_id, repo_id, distributor_id):
"""
Unbind a consumer group to a distributor associated with a repository.
Each consumer in the consumer group will be unbound.
:param consumer_group_id: primary key for a consumer group
:type consumer_group_id: basestring
:param repo_id: repository id
:type repo_id: basestring
:param distributor_id: distributor id
:type distributor_id: basestring
"""
path = self.PATH % (consumer_group_id) + '%s/%s/' % (repo_id, distributor_id)
response = self.server.DELETE(path)
return response
class ConsumerGroupContentAPI(PulpAPI):
"""
Consumer Group content operations
"""
PATH = 'v2/consumer_groups/%s/actions/content/'
def install(self, consumer_group_id, units, options):
path = self.PATH % consumer_group_id + 'install/'
data = {"units": units,
"options": options,}
return self.server.POST(path, data)
def update(self, consumer_group_id, units, options):
path = self.PATH % consumer_group_id + 'update/'
data = {"units": units,
"options": options,}
return self.server.POST(path, data)
def uninstall(self, consumer_group_id, units, options):
path = self.PATH % consumer_group_id + 'uninstall/'
data = {"units": units,
"options": options,}
return self.server.POST(path, data)
| gpl-2.0 | -3,205,509,011,355,084,300 | -1,040,998,826,574,164,400 | 30.600858 | 85 | 0.620128 | false |
hehongliang/tensorflow | tensorflow/tools/dist_test/scripts_allreduce/k8s_generate_yaml.py | 11 | 2997 | #!/usr/bin/python
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generates YAML configuration file for allreduce-based distributed TensorFlow.
The workers will be run in a Kubernetes (k8s) container cluster.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
import k8s_generate_yaml_lib
# Note: It is intentional that we do not import tensorflow in this script. The
# machine that launches a TensorFlow k8s cluster does not have to have the
# Python package of TensorFlow installed on it.
DEFAULT_DOCKER_IMAGE = 'tensorflow/tensorflow:latest-devel'
DEFAULT_PORT = 22
DEFAULT_CONFIG_MAP = 'k8s-config-map'
DEFAULT_DEPLOYMENT = 'k8s-ml-deployment'
def main():
"""Do arg parsing."""
parser = argparse.ArgumentParser()
parser.add_argument(
'--docker_image',
type=str,
default=DEFAULT_DOCKER_IMAGE,
help='Override default docker image for the TensorFlow')
parser.add_argument(
'--num_containers',
type=int,
default=0,
help='How many docker containers to launch')
parser.add_argument(
'--config_map',
type=str,
default=DEFAULT_CONFIG_MAP,
help='Override default config map')
parser.add_argument(
'--deployment',
type=str,
default=DEFAULT_DEPLOYMENT,
help='Override default deployment')
parser.add_argument(
'--ssh_port',
type=int,
default=DEFAULT_PORT,
help='Override default ssh port (Default: %d)' % DEFAULT_PORT)
parser.add_argument(
'--use_hostnet',
type=int,
default=0,
help='Used to enable host network mode (Default: 0)')
parser.add_argument(
'--use_shared_volume',
type=int,
default=0,
help='Used to mount shared volume (Default: 0)')
args = parser.parse_args()
if args.num_containers <= 0:
sys.stderr.write('--num_containers must be greater than 0; received %d\n' %
args.num_containers)
sys.exit(1)
# Generate contents of yaml config
yaml_config = k8s_generate_yaml_lib.GenerateConfig(
args.docker_image, args.num_containers, args.config_map, args.deployment,
args.ssh_port, args.use_hostnet, args.use_shared_volume)
print(yaml_config) # pylint: disable=superfluous-parens
if __name__ == '__main__':
main()
| apache-2.0 | 7,432,226,265,317,795,000 | 1,241,374,018,825,644,800 | 31.225806 | 80 | 0.674007 | false |
abbeymiles/aima-python | submissions/Blue/myNN.py | 10 | 3071 | from sklearn import datasets
from sklearn.neural_network import MLPClassifier
import traceback
from submissions.Blue import music
class DataFrame:
data = []
feature_names = []
target = []
target_names = []
musicATRB = DataFrame()
musicATRB.data = []
targetData = []
'''
Extract data from the CORGIS Music Library.
Most 'hit' songs average 48-52 bars and no more than ~3 minutes (180 seconds)...
'''
allSongs = music.get_songs()
for song in allSongs:
try:
length = float(song['song']["duration"])
targetData.append(length)
genre = song['artist']['terms'] #String
title = song['song']['title'] #String
# release = float(song['song']['Release'])
musicATRB.data.append([genre, title])
except:
traceback.print_exc()
musicATRB.feature_names = [
'Genre',
'Title',
'Release',
'Length',
]
musicATRB.target = []
def musicTarget(duration):
    if duration <= 210:  # if the song is no longer than 3.5 minutes (210 seconds)
        return 1
    return 0
for i in targetData:
tt = musicTarget(i)
musicATRB.target.append(tt)
musicATRB.target_names = [
'Not a hit song',
'Could be a hit song',
]
Examples = {
'Music': musicATRB,
}
'''
Make a custom classifier.
'''
mlpc = MLPClassifier(
hidden_layer_sizes = (100,),
activation = 'relu',
solver='sgd', # 'adam',
alpha = 0.0001,
# batch_size='auto',
learning_rate = 'adaptive', # 'constant',
# power_t = 0.5,
max_iter = 1000, # 200,
shuffle = True,
# random_state = None,
# tol = 1e-4,
# verbose = False,
# warm_start = False,
# momentum = 0.9,
# nesterovs_momentum = True,
# early_stopping = False,
# validation_fraction = 0.1,
# beta_1 = 0.9,
# beta_2 = 0.999,
# epsilon = 1e-8,
)
'''
Try scaling the data.
'''
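# setupScales/scaleGrid below implement per-column min-max normalization:
#
#   scaled = (cell - min[col]) / (max[col] - min[col])
#
# which maps each column into [0, 1]; the try/except in scaleGrid silently
# drops cells that cannot be scaled (here, the string-valued genre/title).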
musicScaled = DataFrame()
def setupScales(grid):
global min, max
min = list(grid[0])
max = list(grid[0])
for row in range(1, len(grid)):
for col in range(len(grid[row])):
cell = grid[row][col]
if cell < min[col]:
min[col] = cell
if cell > max[col]:
max[col] = cell
def scaleGrid(grid):
newGrid = []
for row in range(len(grid)):
newRow = []
for col in range(len(grid[row])):
try:
cell = grid[row][col]
scaled = (cell - min[col]) \
/ (max[col] - min[col])
newRow.append(scaled)
except:
pass
newGrid.append(newRow)
return newGrid
setupScales(musicATRB.data)
musicScaled.data = scaleGrid(musicATRB.data)
musicScaled.feature_names = musicATRB.feature_names
musicScaled.target = musicATRB.target
musicScaled.target_names = musicATRB.target_names
Examples = {
'musicDefault': {
'frame': musicATRB,
},
'MusicSGD': {
'frame': musicATRB,
'mlpc': mlpc
},
'MusicScaled': {
'frame': musicScaled,
},
} | mit | 5,946,634,718,357,194,000 | -700,500,402,494,217,600 | 20.942857 | 80 | 0.567568 | false |
chrisdunelm/grpc | test/http2_test/test_rst_after_header.py | 30 | 1214 | # Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import http2_base_server
class TestcaseRstStreamAfterHeader(object):
"""
In response to an incoming request, this test sends headers, followed by
a reset stream frame. Client asserts that the RPC failed.
"""
def __init__(self):
self._base_server = http2_base_server.H2ProtocolBaseServer()
self._base_server._handlers['RequestReceived'] = self.on_request_received
def get_base_server(self):
return self._base_server
def on_request_received(self, event):
# send initial headers
self._base_server.on_request_received_default(event)
# send reset stream
self._base_server.send_reset_stream()
| apache-2.0 | 2,170,719,423,922,863,600 | 685,974,078,904,252,300 | 35.787879 | 77 | 0.73888 | false |
wgwoods/anaconda | tests/dracut_tests/test_driver_updates.py | 3 | 27177 | # test_driver_updates.py - unittests for driver_updates.py
import unittest
try:
import unittest.mock as mock
except ImportError:
import mock
import os
import tempfile
import shutil
import sys
sys.path.append(os.path.normpath(os.path.dirname(__file__)+'/../../dracut'))
from driver_updates import copy_files, move_files, iter_files, ensure_dir
from driver_updates import append_line, mkdir_seq
def touch(path):
try:
open(path, 'a')
except IOError as e:
if e.errno != 17: raise
def makedir(path):
ensure_dir(path)
return path
def makefile(path):
makedir(os.path.dirname(path))
touch(path)
return path
def makefiles(*paths):
return [makefile(p) for p in paths]
class FileTestCaseBase(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp(prefix="test_driver_updates.")
self.srcdir = self.tmpdir+'/src/'
self.destdir = self.tmpdir+'/dest/'
def tearDown(self):
shutil.rmtree(self.tmpdir, ignore_errors=True)
def makefiles(self, *paths):
return [makefile(os.path.normpath(self.tmpdir+'/'+p)) for p in paths]
class SelfTestCase(FileTestCaseBase):
def test_makefiles(self):
"""check test helpers"""
filepaths = ["sub/dir/test.file", "testfile"]
self.makefiles(*filepaths)
for f in filepaths:
self.assertTrue(os.path.exists(self.tmpdir+'/'+f))
class TestCopyFiles(FileTestCaseBase):
def test_basic(self):
"""copy_file: copy files into destdir, leaving existing contents"""
files = self.makefiles("src/file1", "src/subdir/file2")
self.makefiles("dest/file3")
copy_files(files, self.destdir)
result = set(os.listdir(self.destdir))
self.assertEqual(result, set(["file1", "file2", "file3"]))
def test_overwrite(self):
"""copy_file: overwrite files in destdir if they have the same name"""
src, dest = self.makefiles("src/file1", "dest/file1")
with open(src, 'w') as outf:
outf.write("srcfile")
with open(dest, 'w') as outf:
outf.write("destfile")
copy_files([src], self.destdir)
self.assertEqual(os.listdir(self.destdir), ["file1"])
self.assertEqual(open(dest).read(), "srcfile")
def test_samefile(self):
"""copy_file: skip files already in destdir"""
(dest,) = self.makefiles("dest/file1")
with open(dest, 'w') as outf:
outf.write("destfile")
copy_files([dest], self.destdir)
self.assertEqual(os.listdir(self.destdir), ["file1"])
self.assertEqual(open(dest).read(), "destfile")
def test_copy_to_parent(self):
"""copy_file: skip files in subdirs of destdir"""
files = self.makefiles("dest/subdir/file1")
copy_files(files, self.destdir)
self.assertEqual(list(iter_files(self.destdir)), files)
class TestIterFiles(FileTestCaseBase):
def test_basic(self):
"""iter_files: iterates over full paths to files under topdir"""
files = set(self.makefiles("src/file1", "dest/file2", "src/sub/file3"))
makedir(self.tmpdir+'/empty/dir')
result = set(iter_files(self.tmpdir))
self.assertEqual(files, result)
def test_pattern(self):
"""iter_files: match filename against glob pattern"""
self.makefiles("src/file1.so", "src/sub.ko/file2")
goodfiles = set(self.makefiles("src/sub/file1.ko", "src/file2.ko.xz"))
result = set(iter_files(self.tmpdir, pattern="*.ko*"))
self.assertEqual(result, goodfiles)
class TestMoveFiles(FileTestCaseBase):
def test_basic(self):
"""move_files: move files to destdir"""
files = self.makefiles("src/file1", "src/subdir/file2")
move_files(files, self.destdir)
self.assertEqual(set(os.listdir(self.destdir)), set(["file1", "file2"]))
self.assertEqual(list(iter_files(self.srcdir)), [])
def test_overwrite(self):
"""move_files: overwrite files with the same name"""
src, dest = self.makefiles("src/file1", "dest/file1")
with open(src, 'w') as outf:
outf.write("srcfile")
with open(dest, 'w') as outf:
outf.write("destfile")
move_files([src], self.destdir)
self.assertEqual(os.listdir(self.destdir), ["file1"])
self.assertEqual(open(dest).read(), "srcfile")
self.assertEqual(list(iter_files(self.srcdir)), [])
def test_samefile(self):
"""move_files: leave files alone if they're already in destdir"""
(dest,) = self.makefiles("dest/file1")
with open(dest, 'w') as outf:
outf.write("destfile")
move_files([dest], self.destdir)
self.assertEqual(os.listdir(self.destdir), ["file1"])
self.assertEqual(open(dest).read(), "destfile")
def test_move_to_parent(self):
"""move_files: leave files alone if they're in a subdir of destdir"""
files = set(self.makefiles("dest/subdir/file1", "dest/file2"))
move_files(files, self.destdir)
self.assertEqual(set(iter_files(self.destdir)), files)
class TestAppendLine(FileTestCaseBase):
def test_empty(self):
"""append_line: create file + append \\n when needed"""
line = "this is a line of text with no newline"
outfile = self.tmpdir+'/outfile'
append_line(outfile, line)
self.assertEqual(open(outfile).read(), line+'\n')
def test_append(self):
"""append_line: adds a line to the end of an existing file"""
oldlines = ["line one", "line two", "and I'm line three"]
outfile = self.tmpdir+'/outfile'
with open(outfile, 'w') as outf:
for line in oldlines:
outf.write(line+'\n')
line = "this line contains a newline already\n"
append_line(outfile, line)
self.assertEqual(open(outfile).read(), '\n'.join(oldlines+[line]))
from driver_updates import read_lines
class TestReadLine(FileTestCaseBase):
def test_empty(self):
"""read_lines: return [] for empty file"""
[empty] = self.makefiles("emptyfile")
self.assertEqual(read_lines(empty), [])
def test_missing(self):
"""read_lines: return [] for missing file"""
self.assertEqual(read_lines(self.tmpdir+'/no-such-file'),[])
def test_readlines(self):
"""read_lines: returns a list of lines without trailing newlines"""
filedata = 'line one\nline two\n\nline four\n'
outfile = self.tmpdir+'/outfile'
with open(outfile, 'w') as outf:
outf.write(filedata)
lines = read_lines(outfile)
self.assertEqual(lines, ['line one', 'line two','','line four'])
def test_readline_and_append_line(self):
"""read_lines: returns items as passed to append_line"""
filename = self.tmpdir+'/outfile'
items = ["one", "two", "five"]
for i in items:
append_line(filename, i)
self.assertEqual(items, read_lines(filename))
class TestMkdirSeq(FileTestCaseBase):
def test_basic(self):
"""mkdir_seq: first dir ends with 1"""
newdir = mkdir_seq(self.srcdir+'/DD-')
self.assertEqual(newdir, self.srcdir+'/DD-1')
self.assertTrue(os.path.isdir(newdir))
def test_one_exists(self):
"""mkdir_seq: increment number if file exists"""
firstdir = mkdir_seq(self.srcdir+'/DD-')
newdir = mkdir_seq(self.srcdir+'/DD-')
self.assertEqual(newdir, self.srcdir+'/DD-2')
self.assertTrue(os.path.isdir(newdir))
self.assertTrue(os.path.isdir(firstdir))
from driver_updates import find_repos, save_repo, ARCH
# As far as we know, this is what makes a valid repo: rhdd3 + rpms/`uname -m`/
def makerepo(topdir, desc=None):
descfile = makefile(topdir+'/rhdd3')
if not desc:
desc = os.path.basename(topdir)
with open(descfile, "w") as outf:
outf.write(desc+"\n")
makedir(topdir+'/rpms/'+ARCH)
class TestFindRepos(FileTestCaseBase):
def test_basic(self):
"""find_repos: return RPM dir if a valid repo is found"""
makerepo(self.tmpdir)
repos = find_repos(self.tmpdir)
self.assertEqual(repos, [self.tmpdir+'/rpms/'+ARCH])
self.assertTrue(os.path.isdir(repos[0]))
def test_multiple_subdirs(self):
"""find_repos: descend multiple subdirs if needed"""
makerepo(self.tmpdir+'/driver1')
makerepo(self.tmpdir+'/sub/driver1')
makerepo(self.tmpdir+'/sub/driver2')
repos = find_repos(self.tmpdir)
self.assertEqual(len(repos),3)
class TestSaveRepo(FileTestCaseBase):
def test_basic(self):
"""save_repo: copies a directory to /run/install/DD-X"""
makerepo(self.srcdir)
repo = find_repos(self.srcdir)[0]
makefile(repo+'/fake-something.rpm')
saved = save_repo(repo, target=self.destdir)
self.assertEqual(set(os.listdir(saved)), set(["fake-something.rpm"]))
self.assertEqual(saved, os.path.join(self.destdir, "DD-1"))
from driver_updates import mount, umount, mounted
class MountTestCase(unittest.TestCase):
@mock.patch('driver_updates.mkdir_seq')
@mock.patch('driver_updates.subprocess.check_call')
def test_mkdir(self, check_call, mkdir):
"""mount: makes mountpoint if needed"""
dev, mnt = '/dev/fake', '/media/DD-1'
mkdir.return_value = mnt
mountpoint = mount(dev)
mkdir.assert_called_once_with('/media/DD-')
check_call.assert_called_once_with(["mount", dev, mnt])
self.assertEqual(mnt, mountpoint)
@mock.patch('driver_updates.mkdir_seq')
@mock.patch('driver_updates.subprocess.check_call')
def test_basic(self, check_call, mkdir):
"""mount: calls mount(8) to mount a device/image"""
dev, mnt = '/dev/fake', '/media/fake'
mount(dev, mnt)
check_call.assert_called_once_with(["mount", dev, mnt])
self.assertFalse(mkdir.called)
@mock.patch('driver_updates.subprocess.call')
def test_umount(self, call):
"""umount: calls umount(8)"""
mnt = '/mnt/fake'
umount(mnt)
call.assert_called_once_with(["umount", mnt])
@mock.patch('driver_updates.mount')
@mock.patch('driver_updates.umount')
def test_mount_manager(self, mock_umount, mock_mount):
"""mounted: context manager mounts/umounts as expected"""
dev, mnt = '/dev/fake', '/media/fake'
mock_mount.return_value = mnt
with mounted(dev, mnt) as mountpoint:
mock_mount.assert_called_once_with(dev, mnt)
self.assertFalse(mock_umount.called)
self.assertEqual(mountpoint, mnt)
mock_umount.assert_called_once_with(mnt)
# NOTE: dd_list and dd_extract get tested pretty thoroughly in tests/dd_tests,
# so this is a slightly higher-level test case
from driver_updates import dd_list, dd_extract, Driver
fake_module = Driver(
source='/repo/path/to/fake-driver-1.0-1.rpm',
name='fake-driver',
flags='modules firmwares',
description='Wow this is totally a fake driver.\nHooray for this',
repo='/repo/path/to'
)
fake_enhancement = Driver(
source='/repo/path/to/fake-enhancement-1.0-1.rpm',
name='fake-enhancement',
flags='binaries libraries',
description='This is enhancing the crap out of the installer.\n\nYeah.',
repo=fake_module.repo
)
def dd_list_output(driver):
out='{0.source}\n{0.name}\n{0.flags}\n{0.description}\n---\n'.format(driver)
return out.encode('utf-8')
class DDUtilsTestCase(unittest.TestCase):
@mock.patch("driver_updates.subprocess.check_output")
def test_dd_list(self, check_output):
"""dd_list: returns a list of Driver objects parsed from output"""
output = dd_list_output(fake_module)+dd_list_output(fake_enhancement)
check_output.return_value = output
anaconda, kernel = '19.0', os.uname()[2]
result = dd_list(fake_module.repo)
cmd = check_output.call_args[0][0]
self.assertIn(kernel, cmd)
self.assertIn(anaconda, cmd)
self.assertIn(fake_module.repo, cmd)
self.assertTrue(cmd[0].endswith("dd_list"))
self.assertEqual(len(result), 2)
mod, enh = sorted(result, key=lambda d: d.name)
self.assertEqual(mod.__dict__, fake_module.__dict__)
self.assertEqual(enh.__dict__, fake_enhancement.__dict__)
@mock.patch("driver_updates.subprocess.check_output")
def test_dd_extract(self, check_output):
"""dd_extract: call binary with expected arguments"""
rpm = "/some/kind/of/path.rpm"
outdir = "/output/dir"
dd_extract(rpm, outdir)
cmd = check_output.call_args[0][0]
self.assertIn(os.uname()[2], cmd)
self.assertIn(rpm, cmd)
self.assertIn(outdir, cmd)
self.assertIn("-blmf", cmd)
self.assertTrue(cmd[0].endswith("dd_extract"))
from driver_updates import extract_drivers, grab_driver_files, load_drivers
@mock.patch("driver_updates.ensure_dir")
@mock.patch("driver_updates.save_repo")
@mock.patch("driver_updates.append_line")
@mock.patch("driver_updates.dd_extract")
class ExtractDriversTestCase(unittest.TestCase):
def test_drivers(self, mock_extract, mock_append, mock_save, *args):
"""extract_drivers: save repo, write pkglist"""
extract_drivers(drivers=[fake_enhancement, fake_module])
# extracts all listed modules
mock_extract.assert_has_calls([
mock.call(fake_enhancement.source, "/updates"),
mock.call(fake_module.source, "/updates")
], any_order=True)
pkglist = "/run/install/dd_packages"
mock_append.assert_called_once_with(pkglist, fake_module.name)
mock_save.assert_called_once_with(fake_module.repo)
def test_enhancements(self, mock_extract, mock_append, mock_save, *args):
"""extract_drivers: extract selected drivers, don't save enhancements"""
extract_drivers(drivers=[fake_enhancement])
mock_extract.assert_called_once_with(
fake_enhancement.source, "/updates"
)
self.assertFalse(mock_append.called)
self.assertFalse(mock_save.called)
def test_repo(self, mock_extract, mock_append, mock_save, *args):
"""extract_drivers(repos=[...]) extracts all drivers from named repos"""
with mock.patch("driver_updates.dd_list", side_effect=[
[fake_enhancement],
[fake_enhancement, fake_module]]):
extract_drivers(repos=['enh_repo', 'mod_repo'])
mock_extract.assert_has_calls([
mock.call(fake_enhancement.source, "/updates"),
mock.call(fake_enhancement.source, "/updates"),
mock.call(fake_module.source, "/updates")
])
pkglist = "/run/install/dd_packages"
mock_append.assert_called_once_with(pkglist, fake_module.name)
mock_save.assert_called_once_with(fake_module.repo)
class GrabDriverFilesTestCase(FileTestCaseBase):
def test_basic(self):
"""grab_driver_files: copy drivers into place, return module list"""
# create a bunch of fake extracted files
outdir = self.tmpdir + '/extract-outdir'
moddir = outdir + "/lib/modules/%s/kernel/" % os.uname()[2]
fwdir = outdir + "/lib/firmware/"
modules = makefiles(moddir+"net/funk.ko", moddir+"fs/lolfs.ko.xz")
firmware = makefiles(fwdir+"funk.fw")
makefiles(outdir+"/usr/bin/monkey", outdir+"/other/dir/blah.ko")
mod_upd_dir = self.tmpdir+'/module-updates'
fw_upd_dir = self.tmpdir+'/fw-updates'
# use our updates dirs instead of the default updates dirs
with mock.patch.multiple("driver_updates",
MODULE_UPDATES_DIR=mod_upd_dir,
FIRMWARE_UPDATES_DIR=fw_upd_dir):
modnames = grab_driver_files(outdir)
self.assertEqual(set(modnames), set(["funk", "lolfs"]))
modfiles = set(['funk.ko', 'lolfs.ko.xz'])
fwfiles = set(['funk.fw'])
# modules/firmware are *not* in their old locations
self.assertEqual([f for f in modules+firmware if os.path.exists(f)], [])
# modules are in the system's updates dir
self.assertEqual(set(os.listdir(mod_upd_dir)), modfiles)
# modules are also in outdir's updates dir
self.assertEqual(set(os.listdir(outdir+'/'+mod_upd_dir)), modfiles)
# repeat for firmware
self.assertEqual(set(os.listdir(fw_upd_dir)), fwfiles)
self.assertEqual(set(os.listdir(outdir+'/'+fw_upd_dir)), fwfiles)
class LoadDriversTestCase(unittest.TestCase):
@mock.patch("driver_updates.subprocess.call")
def test_basic(self, call):
"""load_drivers: runs depmod and modprobes all named modules"""
modnames = ['mod1', 'mod2']
load_drivers(modnames)
call.assert_has_calls([
mock.call(["depmod", "-a"]),
mock.call(["modprobe", "-a"] + modnames)
])
from driver_updates import process_driver_disk
class ProcessDriverDiskTestCase(unittest.TestCase):
def setUp(self):
# an iterable that returns fake mountpoints, for mocking mount()
self.fakemount = ["/mnt/DD-%i" % n for n in range(1,10)]
# an iterable that returns fake repos, for mocking find_repos()
self.frepo = {
'/mnt/DD-1': ['/mnt/DD-1/repo1'],
'/mnt/DD-2': ['/mnt/DD-2/repo1', '/mnt/DD-2/repo2'],
}
# fake iso listings for iso_dir
self.fiso = {
'/mnt/DD-1': [],
'/mnt/DD-2': [],
'/mnt/DD-3': [],
}
# a context-manager object to be returned by the mock mounted()
mounted_ctx = mock.MagicMock(
__enter__=mock.MagicMock(side_effect=self.fakemount), # mount
__exit__=mock.MagicMock(return_value=None), # umount
)
self.modlist = []
# set up our patches
patches = (
mock.patch("driver_updates.mounted", return_value=mounted_ctx),
mock.patch("driver_updates.find_repos", side_effect=self.frepo.get),
mock.patch("driver_updates.find_isos", side_effect=self.fiso.get),
mock.patch("driver_updates.extract_drivers", return_value=True),
mock.patch("driver_updates.load_drivers"),
mock.patch('driver_updates.grab_driver_files',
side_effect=lambda: self.modlist),
)
self.mocks = {p.attribute:p.start() for p in patches}
for p in patches: self.addCleanup(p.stop)
def test_basic(self):
"""process_driver_disk: mount disk, extract RPMs, grab + load drivers"""
dev = '/dev/fake'
process_driver_disk(dev)
# did we mount the initial device, and then the .iso we find therein?
self.mocks['mounted'].assert_called_once_with(dev)
self.mocks['extract_drivers'].assert_called_once_with(repos=self.frepo['/mnt/DD-1'])
self.mocks['grab_driver_files'].assert_called_once_with()
self.mocks['load_drivers'].assert_called_once_with(self.modlist)
def test_recursive(self):
"""process_driver_disk: recursively process .isos at toplevel"""
dev = '/dev/fake'
# first mount has no repos, but an iso
self.frepo['/mnt/DD-1'] = []
self.fiso['/mnt/DD-1'].append('magic.iso')
self.fiso['/mnt/DD-2'].append('ignored.iso')
process_driver_disk(dev)
# did we mount the initial device, and the iso therein?
# also: we ignore ignored.iso because magic.iso is a proper DD
self.mocks['mounted'].assert_has_calls([
mock.call(dev), mock.call('magic.iso')
])
# we extracted drivers from the repo(s) in magic.iso
self.mocks['extract_drivers'].assert_called_once_with(repos=self.frepo['/mnt/DD-2'])
self.mocks['grab_driver_files'].assert_called_once_with()
self.mocks['load_drivers'].assert_called_once_with(self.modlist)
def test_no_drivers(self):
"""process_driver_disk: don't run depmod etc. if no new drivers"""
dev = '/dev/fake'
self.mocks['extract_drivers'].return_value = False
process_driver_disk(dev)
self.assertFalse(self.mocks['grab_driver_files'].called)
self.assertFalse(self.mocks['load_drivers'].called)
from driver_updates import process_driver_rpm
class ProcessDriverRPMTestCase(unittest.TestCase):
def setUp(self):
self.frepo = {
'/tmp/fake': ['/mnt/DD-1'],
}
self.modlist = []
# set up our patches
patches = (
mock.patch("driver_updates.find_repos", side_effect=self.frepo.get),
mock.patch("driver_updates.extract_drivers", return_value=True),
mock.patch("driver_updates.load_drivers"),
mock.patch('driver_updates.grab_driver_files',
side_effect=lambda: self.modlist),
)
self.mocks = {p.attribute:p.start() for p in patches}
for p in patches: self.addCleanup(p.stop)
def test_basic(self):
"""process_driver_rpm: extract RPM, grab + load driver"""
rpm = '/tmp/fake/driver.rpm'
process_driver_rpm(rpm)
self.mocks['extract_drivers'].assert_called_once_with(repos=["/tmp/fake"])
self.mocks['grab_driver_files'].assert_called_once_with()
self.mocks['load_drivers'].assert_called_once_with(self.modlist)
from driver_updates import finish, mark_finished, all_finished
class FinishedTestCase(FileTestCaseBase):
def test_mark_finished(self):
"""mark_finished: appends a line to /tmp/dd_finished"""
requeststr = "WOW SOMETHING OR OTHER"
mark_finished(requeststr, topdir=self.tmpdir)
finished = self.tmpdir+'/dd_finished'
self.assertTrue(os.path.exists(finished))
self.assertEqual(read_lines(finished), [requeststr])
def test_all_finished(self):
"""all_finished: True if all lines from dd_todo are in dd_finished"""
todo = self.tmpdir+'/dd_todo'
requests = ['one', 'two', 'final thingy']
with open(todo, 'w') as outf:
outf.write(''.join(r+'\n' for r in requests))
self.assertEqual(set(read_lines(todo)), set(requests))
for r in reversed(requests):
self.assertFalse(all_finished(topdir=self.tmpdir))
mark_finished(r, topdir=self.tmpdir)
self.assertTrue(all_finished(topdir=self.tmpdir))
def test_extra_finished(self):
"""all_finished: True if dd_finished has more items than dd_todo"""
self.test_all_finished()
mark_finished("BONUS", topdir=self.tmpdir)
self.assertTrue(all_finished(topdir=self.tmpdir))
def test_finish(self):
"""finish: mark request finished, and write dd.done if all complete"""
todo = self.tmpdir+'/dd_todo'
done = self.tmpdir+'/dd.done'
requests = ['one', 'two', 'final thingy']
with open(todo, 'w') as outf:
outf.write(''.join(r+'\n' for r in requests))
for r in reversed(requests):
print("marking %s" % r)
self.assertFalse(os.path.exists(done))
finish(r, topdir=self.tmpdir)
self.assertTrue(os.path.exists(done))
from driver_updates import get_deviceinfo, DeviceInfo
blkid_out = b'''\
DEVNAME=/dev/sda2
UUID=0f21a3d1-dcd3-4ab4-a292-c5556850d561
TYPE=ext4
DEVNAME=/dev/sda1
UUID=C53C-EE46
TYPE=vfat
DEVNAME=/dev/sda3
UUID=4126dbb6-c7d3-47b4-b1fc-9bb461df0067
TYPE=btrfs
DEVNAME=/dev/loop0
UUID=6f16967e-0388-4276-bd8d-b88e5b217a55
TYPE=ext4
'''
disk_labels = {
'/dev/sdb1': 'metroid_srv',
'/dev/loop0': 'I\\x20\u262d\\x20COMMUNISM',
'/dev/sda3': 'metroid_root'
}
devicelist = [
DeviceInfo(DEVNAME='/dev/sda2', TYPE='ext4',
UUID='0f21a3d1-dcd3-4ab4-a292-c5556850d561'),
DeviceInfo(DEVNAME='/dev/sda1', TYPE='vfat',
UUID='C53C-EE46'),
DeviceInfo(DEVNAME='/dev/sda3', TYPE='btrfs', LABEL='metroid_root',
UUID='4126dbb6-c7d3-47b4-b1fc-9bb461df0067'),
DeviceInfo(DEVNAME='/dev/loop0', TYPE='ext4',
LABEL='I\\x20\u262d\\x20COMMUNISM',
UUID='6f16967e-0388-4276-bd8d-b88e5b217a55'),
]
# also covers blkid, get_disk_labels, DeviceInfo
class DeviceInfoTestCase(unittest.TestCase):
@mock.patch('driver_updates.subprocess.check_output',return_value=blkid_out)
@mock.patch('driver_updates.get_disk_labels',return_value=disk_labels)
def test_basic(self, get_disk_labels, check_output):
"""get_deviceinfo: parses DeviceInfo from blkid etc."""
disks = get_deviceinfo()
self.assertEqual(len(disks), 4)
disks.sort(key=lambda d: d.device)
loop, efi, boot, root = disks
self.assertEqual(vars(boot), vars(devicelist[0]))
self.assertEqual(vars(efi), vars(devicelist[1]))
self.assertEqual(vars(root), vars(devicelist[2]))
self.assertEqual(vars(loop), vars(devicelist[3]))
def test_shortdev(self):
d = DeviceInfo(DEVNAME="/dev/disk/by-label/OEMDRV")
with mock.patch("os.path.realpath", return_value="/dev/i2o/hdb"):
self.assertEqual(d.shortdev, "i2o/hdb")
# TODO: test TextMenu itself
# py2/3 compat
if sys.version_info.major == 3:
from io import StringIO
else:
from io import BytesIO as StringIO
from driver_updates import device_menu
class DeviceMenuTestCase(unittest.TestCase):
def setUp(self):
patches = (
mock.patch('driver_updates.get_deviceinfo',return_value=devicelist),
)
self.mocks = {p.attribute:p.start() for p in patches}
for p in patches: self.addCleanup(p.stop)
def test_device_menu_exit(self):
"""device_menu: 'c' exits the menu"""
with mock.patch('driver_updates._input', side_effect=['c']):
dev = device_menu()
self.assertEqual(dev, [])
self.assertEqual(self.mocks['get_deviceinfo'].call_count, 1)
def test_device_menu_refresh(self):
"""device_menu: 'r' makes the menu refresh"""
with mock.patch('driver_updates._input', side_effect=['r','c']):
device_menu()
self.assertEqual(self.mocks['get_deviceinfo'].call_count, 2)
@mock.patch("sys.stdout", new_callable=StringIO)
def test_device_menu(self, stdout):
"""device_menu: choosing a number returns that Device"""
choose_num='2'
with mock.patch('driver_updates._input', return_value=choose_num):
result = device_menu()
# if you hit '2' you should get the corresponding device from the list
self.assertEqual(len(result), 1)
dev = result[0]
self.assertEqual(vars(dev), vars(devicelist[int(choose_num)-1]))
# find the corresponding line on-screen
screen = [l.strip() for l in stdout.getvalue().splitlines()]
match = [l for l in screen if l.startswith(choose_num+')')]
self.assertEqual(len(match), 1)
line = match.pop(0)
# the device name (at least) should be on this line
self.assertIn(os.path.basename(dev.device), line)
| gpl-2.0 | 3,357,665,415,573,174,000 | -2,124,279,783,181,482,200 | 40.428354 | 92 | 0.626743 | false |
airbnb/streamalert | tests/unit/streamalert/shared/test_utils.py | 1 | 4360 | """Tests for streamalert/shared/utils.py"""
import json
from nose.tools import assert_equal, assert_false
from streamalert.shared import utils
from streamalert.shared.normalize import Normalizer
MOCK_RECORD_ID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
def test_valid_ip():
"""Utils - Valid IP"""
test_ip_valid = '127.0.0.1'
assert_equal(utils.valid_ip(test_ip_valid), True)
test_ip_invalid = 'test [1234]'
assert_equal(utils.valid_ip(test_ip_invalid), False)
def test_in_network_invalid_ip():
"""Utils - In Network - Invalid IP"""
assert_false(utils.in_network('a string that is not an ip', {'10.0.100.0/24'}))
def test_in_network_invalid_cidr():
"""Utils - In Network - Invalid CIDR"""
assert_false(utils.in_network('127.0.0.1', {'not a cidr'}))
def test_in_network():
"""Utils - In Network"""
cidrs = {
'10.0.16.0/24',
'10.0.17.0/24'
}
ip_in_cidr = '10.0.16.24'
assert_equal(utils.in_network(ip_in_cidr, cidrs), True)
ip_not_in_cidr = '10.0.15.24'
assert_equal(utils.in_network(ip_not_in_cidr, cidrs), False)
def test_get_first_key():
"""Utils - Get First Key"""
data = {
'path': 'ABC',
'details': {
'parent': {
'path': 'DEF',
}
},
'empty_dict': {},
'empty_list': [],
'events': [
{
'path': 'GHI'
}
]
}
# 'path' is a top-level key and so should always be returned first
assert_equal('ABC', utils.get_first_key(data, 'path'))
# dicts and lists can be returned as well
assert_equal(data['details'], utils.get_first_key(data, 'details'))
# None is returned by default if no value is found
assert_equal(None, utils.get_first_key(data, 'no-key-found'))
# Custom default value is returned if specified
assert_equal({}, utils.get_first_key(data, 'no-key-found', {}))
def test_get_keys():
"""Utils - Get Keys"""
data = {
'path': 'ABC',
'details': {
'parent': {
'path': 'DEF'
}
},
'empty_dict': {},
'empty_list': [],
'events': [
{
'path': 'GHI'
}
]
}
assert_equal({'ABC', 'DEF', 'GHI'}, set(utils.get_keys(data, 'path')))
assert_equal(2, len(utils.get_keys(data, 'path', max_matches=2)))
assert_equal([], utils.get_keys({}, 'path'))
def generate_categorized_records(normalized=False, count=2):
"""Generate categorized records by source types"""
json_data = [
{'key_{}'.format(cnt): 'value_{}'.format(cnt)} for cnt in range(count)
]
if normalized:
for data in json_data:
data[Normalizer.NORMALIZATION_KEY] = {
'normalized_type1': [
{
'values': ['value1'],
'function': None
}
],
'normalized_type2': [
{
'values': ['value2', 'value3'],
'function': None,
'send_to_artifacts': True
}
],
'normalized_type3': [
{
'values': ['value2', 'value3'],
'function': None,
'send_to_artifacts': False
}
]
}
return {
'log_type_01_sub_type_01': json_data
}
def generate_artifacts(firehose_records=False):
"""Generate sample artifacts for unit tests"""
normalized_values = [
('normalized_type1', 'value1'),
('normalized_type2', 'value2'),
('normalized_type2', 'value3'),
('normalized_type1', 'value1'),
('normalized_type2', 'value2'),
('normalized_type2', 'value3')
]
artifacts = [
{
'function': 'None',
'streamalert_record_id': MOCK_RECORD_ID,
'source_type': 'log_type_01_sub_type_01',
'type': type,
'value': value
} for type, value in normalized_values
]
if firehose_records:
return [
json.dumps(artifact, separators=(',', ':')) + '\n' for artifact in artifacts
]
return artifacts
| apache-2.0 | 163,318,814,499,729,500 | 6,340,123,407,101,030,000 | 26.948718 | 88 | 0.499771 | false |
vertcoin/vertcoin | test/functional/rpc_users.py | 6 | 4247 | #!/usr/bin/env python3
# Copyright (c) 2015-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test multiple RPC users."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
get_datadir_path,
str_to_b64str,
)
import os
import http.client
import urllib.parse
import subprocess
from random import SystemRandom
import string
import configparser
import sys
def call_with_auth(node, user, password):
url = urllib.parse.urlparse(node.url)
headers = {"Authorization": "Basic " + str_to_b64str('{}:{}'.format(user, password))}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
conn.close()
return resp
class HTTPBasicsTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.supports_cli = False
def setup_chain(self):
super().setup_chain()
#Append rpcauth to bitcoin.conf before initialization
self.rtpassword = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM="
rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
self.rpcuser = "rpcuser💻"
self.rpcpassword = "rpcpassword🔑"
config = configparser.ConfigParser()
config.read_file(open(self.options.configfile))
gen_rpcauth = config['environment']['RPCAUTH']
# Generate RPCAUTH with specified password
self.rt2password = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI="
p = subprocess.Popen([sys.executable, gen_rpcauth, 'rt2', self.rt2password], stdout=subprocess.PIPE, universal_newlines=True)
lines = p.stdout.read().splitlines()
rpcauth2 = lines[1]
# Generate RPCAUTH without specifying password
self.user = ''.join(SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(10))
p = subprocess.Popen([sys.executable, gen_rpcauth, self.user], stdout=subprocess.PIPE, universal_newlines=True)
lines = p.stdout.read().splitlines()
rpcauth3 = lines[1]
self.password = lines[3]
with open(os.path.join(get_datadir_path(self.options.tmpdir, 0), "bitcoin.conf"), 'a', encoding='utf8') as f:
f.write(rpcauth+"\n")
f.write(rpcauth2+"\n")
f.write(rpcauth3+"\n")
with open(os.path.join(get_datadir_path(self.options.tmpdir, 1), "bitcoin.conf"), 'a', encoding='utf8') as f:
f.write("rpcuser={}\n".format(self.rpcuser))
f.write("rpcpassword={}\n".format(self.rpcpassword))
def test_auth(self, node, user, password):
self.log.info('Correct...')
assert_equal(200, call_with_auth(node, user, password).status)
self.log.info('Wrong...')
assert_equal(401, call_with_auth(node, user, password+'wrong').status)
self.log.info('Wrong...')
assert_equal(401, call_with_auth(node, user+'wrong', password).status)
self.log.info('Wrong...')
assert_equal(401, call_with_auth(node, user+'wrong', password+'wrong').status)
def run_test(self):
##################################################
# Check correctness of the rpcauth config option #
##################################################
url = urllib.parse.urlparse(self.nodes[0].url)
self.test_auth(self.nodes[0], url.username, url.password)
self.test_auth(self.nodes[0], 'rt', self.rtpassword)
self.test_auth(self.nodes[0], 'rt2', self.rt2password)
self.test_auth(self.nodes[0], self.user, self.password)
###############################################################
# Check correctness of the rpcuser/rpcpassword config options #
###############################################################
url = urllib.parse.urlparse(self.nodes[1].url)
self.test_auth(self.nodes[1], self.rpcuser, self.rpcpassword)
if __name__ == '__main__':
HTTPBasicsTest ().main ()
| mit | -201,482,692,785,021,280 | 9,115,013,815,010,340,000 | 38.635514 | 133 | 0.624853 | false |
Paczesiowa/youtube-dl | youtube_dl/extractor/rtve.py | 58 | 7807 | # encoding: utf-8
from __future__ import unicode_literals
import base64
import re
import time
from .common import InfoExtractor
from ..compat import compat_urlparse
from ..utils import (
ExtractorError,
float_or_none,
remove_end,
std_headers,
struct_unpack,
)
def _decrypt_url(png):
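    # The URL is hidden inside a PNG: after base64-decoding, the 'tEXt'
    # chunk holds an alphabet and a digit stream separated by '#'; the
    # counters below skip filler characters and combine digit pairs
    # (tens, then units) into indices that pick URL characters out of
    # the alphabet.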
encrypted_data = base64.b64decode(png.encode('utf-8'))
text_index = encrypted_data.find(b'tEXt')
text_chunk = encrypted_data[text_index - 4:]
length = struct_unpack('!I', text_chunk[:4])[0]
# Use bytearray to get integers when iterating in both python 2.x and 3.x
data = bytearray(text_chunk[8:8 + length])
data = [chr(b) for b in data if b != 0]
hash_index = data.index('#')
alphabet_data = data[:hash_index]
url_data = data[hash_index + 1:]
alphabet = []
e = 0
d = 0
for l in alphabet_data:
if d == 0:
alphabet.append(l)
d = e = (e + 1) % 4
else:
d -= 1
url = ''
f = 0
e = 3
b = 1
for letter in url_data:
if f == 0:
l = int(letter) * 10
f = 1
else:
if e == 0:
l += int(letter)
url += alphabet[l]
e = (b + 3) % 4
f = 0
b += 1
else:
e -= 1
return url
class RTVEALaCartaIE(InfoExtractor):
IE_NAME = 'rtve.es:alacarta'
IE_DESC = 'RTVE a la carta'
_VALID_URL = r'http://www\.rtve\.es/(m/)?alacarta/videos/[^/]+/[^/]+/(?P<id>\d+)'
_TESTS = [{
'url': 'http://www.rtve.es/alacarta/videos/balonmano/o-swiss-cup-masculina-final-espana-suecia/2491869/',
'md5': '1d49b7e1ca7a7502c56a4bf1b60f1b43',
'info_dict': {
'id': '2491869',
'ext': 'mp4',
'title': 'Balonmano - Swiss Cup masculina. Final: España-Suecia',
'duration': 5024.566,
},
}, {
'note': 'Live stream',
'url': 'http://www.rtve.es/alacarta/videos/television/24h-live/1694255/',
'info_dict': {
'id': '1694255',
'ext': 'flv',
'title': 'TODO',
},
'skip': 'The f4m manifest can\'t be used yet',
}, {
'url': 'http://www.rtve.es/m/alacarta/videos/cuentame-como-paso/cuentame-como-paso-t16-ultimo-minuto-nuestra-vida-capitulo-276/2969138/?media=tve',
'only_matching': True,
}]
def _real_initialize(self):
user_agent_b64 = base64.b64encode(std_headers['User-Agent'].encode('utf-8')).decode('utf-8')
manager_info = self._download_json(
'http://www.rtve.es/odin/loki/' + user_agent_b64,
None, 'Fetching manager info')
self._manager = manager_info['manager']
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
info = self._download_json(
'http://www.rtve.es/api/videos/%s/config/alacarta_videos.json' % video_id,
video_id)['page']['items'][0]
if info['state'] == 'DESPU':
raise ExtractorError('The video is no longer available', expected=True)
png_url = 'http://www.rtve.es/ztnr/movil/thumbnail/%s/videos/%s.png' % (self._manager, video_id)
png = self._download_webpage(png_url, video_id, 'Downloading url information')
video_url = _decrypt_url(png)
if not video_url.endswith('.f4m'):
auth_url = video_url.replace(
'resources/', 'auth/resources/'
).replace('.net.rtve', '.multimedia.cdn.rtve')
video_path = self._download_webpage(
auth_url, video_id, 'Getting video url')
# Use mvod1.akcdn instead of flash.akamaihd.multimedia.cdn to get
# the right Content-Length header and the mp4 format
video_url = compat_urlparse.urljoin(
'http://mvod1.akcdn.rtve.es/', video_path)
subtitles = None
if info.get('sbtFile') is not None:
subtitles = self.extract_subtitles(video_id, info['sbtFile'])
return {
'id': video_id,
'title': info['title'],
'url': video_url,
'thumbnail': info.get('image'),
'page_url': url,
'subtitles': subtitles,
'duration': float_or_none(info.get('duration'), scale=1000),
}
def _get_subtitles(self, video_id, sub_file):
subs = self._download_json(
sub_file + '.json', video_id,
'Downloading subtitles info')['page']['items']
return dict(
(s['lang'], [{'ext': 'vtt', 'url': s['src']}])
for s in subs)
class RTVEInfantilIE(InfoExtractor):
IE_NAME = 'rtve.es:infantil'
IE_DESC = 'RTVE infantil'
_VALID_URL = r'https?://(?:www\.)?rtve\.es/infantil/serie/(?P<show>[^/]*)/video/(?P<short_title>[^/]*)/(?P<id>[0-9]+)/'
_TESTS = [{
'url': 'http://www.rtve.es/infantil/serie/cleo/video/maneras-vivir/3040283/',
'md5': '915319587b33720b8e0357caaa6617e6',
'info_dict': {
'id': '3040283',
'ext': 'mp4',
'title': 'Maneras de vivir',
'thumbnail': 'http://www.rtve.es/resources/jpg/6/5/1426182947956.JPG',
'duration': 357.958,
},
}]
def _real_extract(self, url):
video_id = self._match_id(url)
info = self._download_json(
'http://www.rtve.es/api/videos/%s/config/alacarta_videos.json' % video_id,
video_id)['page']['items'][0]
webpage = self._download_webpage(url, video_id)
vidplayer_id = self._search_regex(
r' id="vidplayer([0-9]+)"', webpage, 'internal video ID')
png_url = 'http://www.rtve.es/ztnr/movil/thumbnail/default/videos/%s.png' % vidplayer_id
png = self._download_webpage(png_url, video_id, 'Downloading url information')
video_url = _decrypt_url(png)
return {
'id': video_id,
'ext': 'mp4',
'title': info['title'],
'url': video_url,
'thumbnail': info.get('image'),
'duration': float_or_none(info.get('duration'), scale=1000),
}
class RTVELiveIE(InfoExtractor):
IE_NAME = 'rtve.es:live'
IE_DESC = 'RTVE.es live streams'
_VALID_URL = r'http://www\.rtve\.es/(?:deportes/directo|noticias|television)/(?P<id>[a-zA-Z0-9-]+)'
_TESTS = [{
'url': 'http://www.rtve.es/noticias/directo-la-1/',
'info_dict': {
'id': 'directo-la-1',
'ext': 'flv',
'title': 're:^La 1 de TVE [0-9]{4}-[0-9]{2}-[0-9]{2}Z[0-9]{6}$',
},
'params': {
'skip_download': 'live stream',
}
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
start_time = time.gmtime()
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
player_url = self._search_regex(
r'<param name="movie" value="([^"]+)"/>', webpage, 'player URL')
title = remove_end(self._og_search_title(webpage), ' en directo')
title += ' ' + time.strftime('%Y-%m-%dZ%H%M%S', start_time)
vidplayer_id = self._search_regex(
r' id="vidplayer([0-9]+)"', webpage, 'internal video ID')
png_url = 'http://www.rtve.es/ztnr/movil/thumbnail/default/videos/%s.png' % vidplayer_id
png = self._download_webpage(png_url, video_id, 'Downloading url information')
video_url = _decrypt_url(png)
return {
'id': video_id,
'ext': 'flv',
'title': title,
'url': video_url,
'app': 'rtve-live-live?ovpfv=2.1.2',
'player_url': player_url,
'rtmp_live': True,
}
| unlicense | -695,596,690,386,685,200 | 6,341,307,644,768,624,000 | 33.848214 | 155 | 0.531258 | false |
PRIMEDesigner15/PRIMEDesigner15 | dependencies/Lib/test/unittests/test_threading_local.py | 167 | 6339 | import unittest
from doctest import DocTestSuite
from test import support
import weakref
import gc
# Modules under test
_thread = support.import_module('_thread')
threading = support.import_module('threading')
import _threading_local
class Weak(object):
pass
def target(local, weaklist):
weak = Weak()
local.weak = weak
weaklist.append(weakref.ref(weak))
class BaseLocalTest:
def test_local_refs(self):
self._local_refs(20)
self._local_refs(50)
self._local_refs(100)
def _local_refs(self, n):
local = self._local()
weaklist = []
for i in range(n):
t = threading.Thread(target=target, args=(local, weaklist))
t.start()
t.join()
del t
gc.collect()
self.assertEqual(len(weaklist), n)
# XXX _threading_local keeps the local of the last stopped thread alive.
deadlist = [weak for weak in weaklist if weak() is None]
self.assertIn(len(deadlist), (n-1, n))
# Assignment to the same thread local frees it sometimes (!)
local.someothervar = None
gc.collect()
deadlist = [weak for weak in weaklist if weak() is None]
self.assertIn(len(deadlist), (n-1, n), (n, len(deadlist)))
def test_derived(self):
# Issue 3088: if there is a threads switch inside the __init__
# of a threading.local derived class, the per-thread dictionary
# is created but not correctly set on the object.
# The first member set may be bogus.
import time
class Local(self._local):
def __init__(self):
time.sleep(0.01)
local = Local()
def f(i):
local.x = i
# Simply check that the variable is correctly set
self.assertEqual(local.x, i)
threads= []
for i in range(10):
t = threading.Thread(target=f, args=(i,))
t.start()
threads.append(t)
for t in threads:
t.join()
def test_derived_cycle_dealloc(self):
# http://bugs.python.org/issue6990
class Local(self._local):
pass
locals = None
passed = False
e1 = threading.Event()
e2 = threading.Event()
def f():
nonlocal passed
# 1) Involve Local in a cycle
cycle = [Local()]
cycle.append(cycle)
cycle[0].foo = 'bar'
# 2) GC the cycle (triggers threadmodule.c::local_clear
# before local_dealloc)
del cycle
gc.collect()
e1.set()
e2.wait()
# 4) New Locals should be empty
passed = all(not hasattr(local, 'foo') for local in locals)
t = threading.Thread(target=f)
t.start()
e1.wait()
# 3) New Locals should recycle the original's address. Creating
# them in the thread overwrites the thread state and avoids the
# bug
locals = [Local() for i in range(10)]
e2.set()
t.join()
self.assertTrue(passed)
def test_arguments(self):
# Issue 1522237
class MyLocal(self._local):
def __init__(self, *args, **kwargs):
pass
MyLocal(a=1)
MyLocal(1)
self.assertRaises(TypeError, self._local, a=1)
self.assertRaises(TypeError, self._local, 1)
def _test_one_class(self, c):
self._failed = "No error message set or cleared."
obj = c()
e1 = threading.Event()
e2 = threading.Event()
def f1():
obj.x = 'foo'
obj.y = 'bar'
del obj.y
e1.set()
e2.wait()
def f2():
try:
foo = obj.x
except AttributeError:
# This is expected -- we haven't set obj.x in this thread yet!
self._failed = "" # passed
else:
self._failed = ('Incorrectly got value %r from class %r\n' %
(foo, c))
sys.stderr.write(self._failed)
t1 = threading.Thread(target=f1)
t1.start()
e1.wait()
t2 = threading.Thread(target=f2)
t2.start()
t2.join()
# The test is done; just let t1 know it can exit, and wait for it.
e2.set()
t1.join()
self.assertFalse(self._failed, self._failed)
def test_threading_local(self):
self._test_one_class(self._local)
def test_threading_local_subclass(self):
class LocalSubclass(self._local):
"""To test that subclasses behave properly."""
self._test_one_class(LocalSubclass)
def _test_dict_attribute(self, cls):
obj = cls()
obj.x = 5
self.assertEqual(obj.__dict__, {'x': 5})
with self.assertRaises(AttributeError):
obj.__dict__ = {}
with self.assertRaises(AttributeError):
del obj.__dict__
def test_dict_attribute(self):
self._test_dict_attribute(self._local)
def test_dict_attribute_subclass(self):
class LocalSubclass(self._local):
"""To test that subclasses behave properly."""
self._test_dict_attribute(LocalSubclass)
def test_cycle_collection(self):
class X:
pass
x = X()
x.local = self._local()
x.local.x = x
wr = weakref.ref(x)
del x
gc.collect()
self.assertIs(wr(), None)
class ThreadLocalTest(unittest.TestCase, BaseLocalTest):
_local = _thread._local
class PyThreadingLocalTest(unittest.TestCase, BaseLocalTest):
_local = _threading_local.local
def test_main():
suite = unittest.TestSuite()
suite.addTest(DocTestSuite('_threading_local'))
suite.addTest(unittest.makeSuite(ThreadLocalTest))
suite.addTest(unittest.makeSuite(PyThreadingLocalTest))
local_orig = _threading_local.local
def setUp(test):
_threading_local.local = _thread._local
def tearDown(test):
_threading_local.local = local_orig
suite.addTest(DocTestSuite('_threading_local',
setUp=setUp, tearDown=tearDown)
)
support.run_unittest(suite)
if __name__ == '__main__':
test_main()
| bsd-3-clause | -6,008,792,598,114,855,000 | -4,155,050,324,232,318,500 | 27.173333 | 80 | 0.553557 | false |
mephizzle/wagtail | wagtail/wagtailredirects/models.py | 8 | 2622 | from __future__ import unicode_literals
from django.db import models
from django.utils.six.moves.urllib.parse import urlparse
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel, MultiFieldPanel, PageChooserPanel
class Redirect(models.Model):
old_path = models.CharField(verbose_name=_("Redirect from"), max_length=255, unique=True, db_index=True)
site = models.ForeignKey('wagtailcore.Site', verbose_name=_('Site'), null=True, blank=True, related_name='redirects', db_index=True, editable=False)
is_permanent = models.BooleanField(verbose_name=_("Permanent"), default=True, help_text=_("Recommended. Permanent redirects ensure search engines forget the old page (the 'Redirect from') and index the new page instead."))
redirect_page = models.ForeignKey('wagtailcore.Page', verbose_name=_("Redirect to a page"), null=True, blank=True)
redirect_link = models.URLField(verbose_name=_("Redirect to any URL"), blank=True)
@property
def title(self):
return self.old_path
@property
def link(self):
if self.redirect_page:
return self.redirect_page.url
else:
return self.redirect_link
def get_is_permanent_display(self):
if self.is_permanent:
return "permanent"
else:
return "temporary"
@classmethod
def get_for_site(cls, site=None):
if site:
return cls.objects.filter(models.Q(site=site) | models.Q(site=None))
else:
return cls.objects.all()
@staticmethod
def normalise_path(url):
# Parse url
url_parsed = urlparse(url)
# Path must start with / but not end with /
path = url_parsed[2]
if not path.startswith('/'):
path = '/' + path
if path.endswith('/'):
path = path[:-1]
# Query string components must be sorted alphabetically
query_string = url_parsed[4]
query_string_components = query_string.split('&')
query_string = '&'.join(sorted(query_string_components))
# Add query string to path
if query_string:
path = path + '?' + query_string
return path
def clean(self):
# Normalise old path
self.old_path = Redirect.normalise_path(self.old_path)
class Meta:
verbose_name = _('Redirect')
Redirect.content_panels = [
MultiFieldPanel([
FieldPanel('old_path'),
FieldPanel('is_permanent'),
PageChooserPanel('redirect_page'),
FieldPanel('redirect_link'),
])
]
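# Illustrative sketch (not part of the original module): normalise_path is a
# pure string transformation, so it can be exercised without a database. The
# helper name _example_normalise_path is ours.
def _example_normalise_path():
    assert Redirect.normalise_path('hello/world/') == '/hello/world'
    # Query string components are sorted alphabetically.
    assert Redirect.normalise_path('/page?b=2&a=1') == '/page?a=1&b=2'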
| bsd-3-clause | 1,895,054,319,582,595,000 | -3,579,796,375,296,994,300 | 32.189873 | 226 | 0.636537 | false |
sivel/ansible-modules-extras | cloud/amazon/ec2_vpc_dhcp_options.py | 23 | 15072 | #!/usr/bin/python
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'committer',
'version': '1.0'}
DOCUMENTATION = """
---
module: ec2_vpc_dhcp_options
short_description: Manages DHCP Options, and can ensure the DHCP options for the given VPC match what's
requested
description:
- This module removes, or creates DHCP option sets, and can associate them to a VPC.
Optionally, a new DHCP Options set can be created that converges a VPC's existing
DHCP option set with values provided.
When dhcp_options_id is provided, the module will
1. remove (with state='absent')
       2. ensure tags are applied (if state='present' and tags are provided)
       3. attach it to a VPC (if state='present' and a vpc_id is provided).
      If any of the optional values are missing, they will be treated
      as a no-op (i.e., inherit what already exists for the VPC).
To remove existing options while inheriting, supply an empty value
(e.g. set ntp_servers to [] if you want to remove them from the VPC's options)
Most of the options should be self-explanatory.
author: "Joel Thompson (@joelthompson)"
version_added: 2.1
options:
domain_name:
description:
- The domain name to set in the DHCP option sets
required: false
default: None
dns_servers:
description:
- A list of hosts to set the DNS servers for the VPC to. (Should be a
list of IP addresses rather than host names.)
required: false
default: None
ntp_servers:
description:
- List of hosts to advertise as NTP servers for the VPC.
required: false
default: None
netbios_name_servers:
description:
- List of hosts to advertise as NetBIOS servers.
required: false
default: None
netbios_node_type:
description:
- NetBIOS node type to advertise in the DHCP options.
The AWS recommendation is to use 2 (when using netbios name services)
http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_DHCP_Options.html
required: false
default: None
vpc_id:
description:
- VPC ID to associate with the requested DHCP option set.
If no vpc id is provided, and no matching option set is found then a new
DHCP option set is created.
required: false
default: None
delete_old:
description:
- Whether to delete the old VPC DHCP option set when associating a new one.
This is primarily useful for debugging/development purposes when you
want to quickly roll back to the old option set. Note that this setting
will be ignored, and the old DHCP option set will be preserved, if it
is in use by any other VPC. (Otherwise, AWS will return an error.)
required: false
default: true
inherit_existing:
description:
- For any DHCP options not specified in these parameters, whether to
inherit them from the options set already applied to vpc_id, or to
reset them to be empty.
required: false
default: false
tags:
description:
- Tags to be applied to a VPC options set if a new one is created, or
if the resource_id is provided. (options must match)
required: False
default: None
aliases: [ 'resource_tags']
version_added: "2.1"
dhcp_options_id:
description:
- The resource_id of an existing DHCP options set.
If this is specified, then it will override other settings, except tags
(which will be updated to match)
required: False
default: None
version_added: "2.1"
state:
description:
- create/assign or remove the DHCP options.
If state is set to absent, then a DHCP options set matched either
by id, or tags and options will be removed if possible.
required: False
default: present
choices: [ 'absent', 'present' ]
version_added: "2.1"
extends_documentation_fragment: aws
requirements:
- boto
"""
RETURN = """
new_options:
description: The DHCP options created, associated or found
returned: when appropriate
type: dict
sample:
domain-name-servers:
- 10.0.0.1
- 10.0.1.1
    netbios-name-servers:
- 10.0.0.1
- 10.0.1.1
netbios-node-type: 2
domain-name: "my.example.com"
dhcp_options_id:
  description: The AWS resource id of the primary DHCP options set created, found or removed
type: string
returned: when available
changed:
description: Whether the dhcp options were changed
type: bool
returned: always
"""
EXAMPLES = """
# Completely overrides the VPC DHCP options associated with VPC vpc-123456 and deletes any existing
# DHCP option set that may have been attached to that VPC.
- ec2_vpc_dhcp_options:
domain_name: "foo.example.com"
region: us-east-1
dns_servers:
- 10.0.0.1
- 10.0.1.1
ntp_servers:
- 10.0.0.2
- 10.0.1.2
netbios_name_servers:
- 10.0.0.1
- 10.0.1.1
netbios_node_type: 2
vpc_id: vpc-123456
delete_old: True
inherit_existing: False
# Ensure the DHCP option set for the VPC has 10.0.0.4 and 10.0.1.4 as the specified DNS servers, but
# keep any other existing settings. Also, keep the old DHCP option set around.
- ec2_vpc_dhcp_options:
region: us-east-1
dns_servers:
- "{{groups['dns-primary']}}"
- "{{groups['dns-secondary']}}"
vpc_id: vpc-123456
inherit_existing: True
delete_old: False
## Create a DHCP option set with 4.4.4.4 and 8.8.8.8 as the specified DNS servers, with tags
## but do not assign to a VPC
- ec2_vpc_dhcp_options:
region: us-east-1
dns_servers:
- 4.4.4.4
- 8.8.8.8
tags:
Name: google servers
Environment: Test
## Delete a DHCP options set that matches the tags and options specified
- ec2_vpc_dhcp_options:
region: us-east-1
dns_servers:
- 4.4.4.4
- 8.8.8.8
tags:
Name: google servers
Environment: Test
state: absent
## Associate a DHCP options set with a VPC by ID
- ec2_vpc_dhcp_options:
region: us-east-1
dhcp_options_id: dopt-12345678
vpc_id: vpc-123456
"""
import boto.vpc
import boto.ec2
from boto.exception import EC2ResponseError
import socket
import collections
def get_resource_tags(vpc_conn, resource_id):
return dict((t.name, t.value) for t in vpc_conn.get_all_tags(filters={'resource-id': resource_id}))
def ensure_tags(vpc_conn, resource_id, tags, add_only, check_mode):
try:
cur_tags = get_resource_tags(vpc_conn, resource_id)
if tags == cur_tags:
return {'changed': False, 'tags': cur_tags}
to_delete = dict((k, cur_tags[k]) for k in cur_tags if k not in tags)
if to_delete and not add_only:
vpc_conn.delete_tags(resource_id, to_delete, dry_run=check_mode)
to_add = dict((k, tags[k]) for k in tags if k not in cur_tags)
if to_add:
vpc_conn.create_tags(resource_id, to_add, dry_run=check_mode)
latest_tags = get_resource_tags(vpc_conn, resource_id)
return {'changed': True, 'tags': latest_tags}
except EC2ResponseError as e:
        # get_error_message() is not defined anywhere in this module; report
        # the exception text directly instead.
        module.fail_json(msg=str(e))
def fetch_dhcp_options_for_vpc(vpc_conn, vpc_id):
"""
Returns the DHCP options object currently associated with the requested VPC ID using the VPC
connection variable.
"""
vpcs = vpc_conn.get_all_vpcs(vpc_ids=[vpc_id])
if len(vpcs) != 1 or vpcs[0].dhcp_options_id == "default":
return None
dhcp_options = vpc_conn.get_all_dhcp_options(dhcp_options_ids=[vpcs[0].dhcp_options_id])
if len(dhcp_options) != 1:
return None
return dhcp_options[0]
def match_dhcp_options(vpc_conn, tags=None, options=None):
"""
Finds a DHCP Options object that optionally matches the tags and options provided
"""
dhcp_options = vpc_conn.get_all_dhcp_options()
for dopts in dhcp_options:
if (not tags) or get_resource_tags(vpc_conn, dopts.id) == tags:
if (not options) or dopts.options == options:
return(True, dopts)
return(False, None)
def remove_dhcp_options_by_id(vpc_conn, dhcp_options_id):
associations = vpc_conn.get_all_vpcs(filters={'dhcpOptionsId': dhcp_options_id})
if len(associations) > 0:
return False
else:
vpc_conn.delete_dhcp_options(dhcp_options_id)
return True
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
dhcp_options_id=dict(type='str', default=None),
domain_name=dict(type='str', default=None),
dns_servers=dict(type='list', default=None),
ntp_servers=dict(type='list', default=None),
netbios_name_servers=dict(type='list', default=None),
netbios_node_type=dict(type='int', default=None),
vpc_id=dict(type='str', default=None),
delete_old=dict(type='bool', default=True),
inherit_existing=dict(type='bool', default=False),
tags=dict(type='dict', default=None, aliases=['resource_tags']),
state=dict(type='str', default='present', choices=['present', 'absent'])
)
)
    global module  # ensure_tags() above reports failures through this handle
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
params = module.params
found = False
changed = False
new_options = collections.defaultdict(lambda: None)
region, ec2_url, boto_params = get_aws_connection_info(module)
connection = connect_to_aws(boto.vpc, region, **boto_params)
existing_options = None
# First check if we were given a dhcp_options_id
if not params['dhcp_options_id']:
# No, so create new_options from the parameters
        if params['dns_servers'] is not None:
            new_options['domain-name-servers'] = params['dns_servers']
        if params['netbios_name_servers'] is not None:
            new_options['netbios-name-servers'] = params['netbios_name_servers']
        if params['ntp_servers'] is not None:
            new_options['ntp-servers'] = params['ntp_servers']
        if params['domain_name'] is not None:
            # needs to be a list for comparison with boto objects later
            new_options['domain-name'] = [params['domain_name']]
        if params['netbios_node_type'] is not None:
            # needs to be a list for comparison with boto objects later
            new_options['netbios-node-type'] = [str(params['netbios_node_type'])]
# If we were given a vpc_id then we need to look at the options on that
if params['vpc_id']:
existing_options = fetch_dhcp_options_for_vpc(connection, params['vpc_id'])
# if we've been asked to inherit existing options, do that now
if params['inherit_existing']:
if existing_options:
for option in [ 'domain-name-servers', 'netbios-name-servers', 'ntp-servers', 'domain-name', 'netbios-node-type']:
if existing_options.options.get(option) and new_options[option] != [] and (not new_options[option] or [''] == new_options[option]):
new_options[option] = existing_options.options.get(option)
# Do the vpc's dhcp options already match what we're asked for? if so we are done
if existing_options and new_options == existing_options.options:
module.exit_json(changed=changed, new_options=new_options, dhcp_options_id=existing_options.id)
# If no vpc_id was given, or the options don't match then look for an existing set using tags
found, dhcp_option = match_dhcp_options(connection, params['tags'], new_options)
# Now let's cover the case where there are existing options that we were told about by id
# If a dhcp_options_id was supplied we don't look at options inside, just set tags (if given)
else:
supplied_options = connection.get_all_dhcp_options(filters={'dhcp-options-id':params['dhcp_options_id']})
if len(supplied_options) != 1:
if params['state'] != 'absent':
                module.fail_json(msg="a dhcp_options_id was supplied, but does not exist")
else:
found = True
dhcp_option = supplied_options[0]
if params['state'] != 'absent' and params['tags']:
ensure_tags(connection, dhcp_option.id, params['tags'], False, module.check_mode)
# Now we have the dhcp options set, let's do the necessary
# if we found options we were asked to remove then try to do so
if params['state'] == 'absent':
if not module.check_mode:
if found:
changed = remove_dhcp_options_by_id(connection, dhcp_option.id)
module.exit_json(changed=changed, new_options={})
# otherwise if we haven't found the required options we have something to do
elif not module.check_mode and not found:
# create some dhcp options if we weren't able to use existing ones
if not found:
# Convert netbios-node-type and domain-name back to strings
if new_options['netbios-node-type']:
new_options['netbios-node-type'] = new_options['netbios-node-type'][0]
if new_options['domain-name']:
new_options['domain-name'] = new_options['domain-name'][0]
# create the new dhcp options set requested
dhcp_option = connection.create_dhcp_options(
new_options['domain-name'],
new_options['domain-name-servers'],
new_options['ntp-servers'],
new_options['netbios-name-servers'],
new_options['netbios-node-type'])
changed = True
if params['tags']:
ensure_tags(connection, dhcp_option.id, params['tags'], False, module.check_mode)
# If we were given a vpc_id, then attach the options we now have to that before we finish
if params['vpc_id'] and not module.check_mode:
changed = True
connection.associate_dhcp_options(dhcp_option.id, params['vpc_id'])
# and remove old ones if that was requested
if params['delete_old'] and existing_options:
remove_dhcp_options_by_id(connection, existing_options.id)
module.exit_json(changed=changed, new_options=new_options, dhcp_options_id=dhcp_option.id)
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == "__main__":
main()
| gpl-3.0 | -5,756,831,773,364,804,000 | -4,356,261,545,106,049,500 | 37.745501 | 155 | 0.649151 | false |
nclsHart/glances | glances/plugins/glances_monitor.py | 1 | 4261 | # -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
# Copyright (C) 2015 Nicolargo <[email protected]>
#
# Glances is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Glances is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Monitor plugin."""
# Import Glances lib
from glances.core.glances_logging import logger
from glances.core.glances_monitor_list import MonitorList as glancesMonitorList
from glances.plugins.glances_plugin import GlancesPlugin
class Plugin(GlancesPlugin):
"""Glances' monitor plugin."""
def __init__(self, args=None):
"""Init the plugin."""
GlancesPlugin.__init__(self, args=args)
# We want to display the stat in the curse interface
self.display_curse = True
# Init stats
self.glances_monitors = None
self.stats = []
def load_limits(self, config):
"""Load the monitored list from the conf file."""
logger.debug("Monitor plugin configuration detected in the configuration file")
self.glances_monitors = glancesMonitorList(config)
def update(self):
"""Update the monitored list."""
if self.get_input() == 'local':
# Monitor list only available in a full Glances environment
            # Check if the glances_monitors instance is initialized
if self.glances_monitors is None:
return self.stats
# Update the monitored list (result of command)
self.glances_monitors.update()
# Put it on the stats var
self.stats = self.glances_monitors.get()
else:
pass
return self.stats
def get_alert(self, nbprocess=0, countmin=None, countmax=None, header="", log=False):
"""Return the alert status relative to the process number."""
if nbprocess is None:
return 'OK'
if countmin is None:
countmin = nbprocess
if countmax is None:
countmax = nbprocess
if nbprocess > 0:
if int(countmin) <= int(nbprocess) <= int(countmax):
return 'OK'
else:
return 'WARNING'
else:
if int(countmin) == 0:
return 'OK'
else:
return 'CRITICAL'
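    # Illustrative only (our annotation): for a monitored entry expecting
    # between 1 and 4 processes, get_alert(0, 1, 4) returns 'CRITICAL',
    # get_alert(2, 1, 4) returns 'OK' and get_alert(6, 1, 4) returns 'WARNING'.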
def msg_curse(self, args=None):
"""Return the dict to display in the curse interface."""
# Init the return message
ret = []
# Only process if stats exist and display plugin enable...
if not self.stats or args.disable_process:
return ret
# Build the string message
for m in self.stats:
msg = '{0:<16} '.format(m['description'])
ret.append(self.curse_add_line(
msg, self.get_alert(m['count'], m['countmin'], m['countmax'])))
msg = '{0:<3} '.format(m['count'] if m['count'] > 1 else '')
ret.append(self.curse_add_line(msg))
msg = '{0:13} '.format(_("RUNNING") if m['count'] >= 1 else _("NOT RUNNING"))
ret.append(self.curse_add_line(msg))
# Decode to UTF8 (only for Python 3)
try:
msg = m['result'].decode('utf-8') if m['count'] >= 1 else ''
except (UnicodeError, AttributeError):
try:
msg = m['result'] if m['count'] >= 1 else ''
except UnicodeError:
msg = m['result'].encode('utf-8') if m['count'] >= 1 else ''
ret.append(self.curse_add_line(msg, optional=True, splittable=True))
ret.append(self.curse_new_line())
# Delete the last empty line
try:
ret.pop()
except IndexError:
pass
return ret
| lgpl-3.0 | 8,734,376,950,179,791,000 | -1,410,756,602,994,922,800 | 34.508333 | 89 | 0.58836 | false |
adit-chandra/tensorflow | tensorflow/python/keras/optimizer_v2/nadam.py | 6 | 10023 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Nadam for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.keras import backend_config
from tensorflow.python.keras.optimizer_v2 import learning_rate_schedule
from tensorflow.python.keras.optimizer_v2 import optimizer_v2
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.optimizers.Nadam')
class Nadam(optimizer_v2.OptimizerV2):
r"""Optimizer that implements the NAdam algorithm.
Much like Adam is essentially RMSprop with momentum, Nadam is Adam with
Nesterov momentum.
Initialization:
$$m_0 := 0 \text{(Initialize 1st moment vector)}$$
$$v_0 := 0 \text{(Initialize 2nd moment vector)}$$
$$mu_0 := 1$$
$$t := 0 \text{(Initialize timestep)}$$
Computes:
$$t := t + 1$$
$$\mu_t := \beta_1 * (1 - 0.5 * 0.96^{0.004 * t})$$
$$g' := g / (1 - \prod_{i=1}^{t}{\mu_i})$$
$$m_t := \beta_1 * m_{t-1} + (1 - \beta_1) * g$$
$$m' := m_t / (1 - \prod_{i=1}^{t+1}{\mu_i})$$
$$v_t := \beta_2 * v_{t-1} + (1 - \beta_2) * g * g$$
$$v' := v_t / (1 - \beta_2^t)$$
$$\bar{m} := (1 - \mu_t) * g' + \mu_{t+1} * m'$$
$$\theta_t := \theta_{t-1} - lr * \bar{m} / (\sqrt{v'} + \epsilon)$$
gradient is evaluated at theta(t) + momentum * v(t), and the variables always
store theta + beta_1 * m / sqrt(v) instead of theta.
References
See [Dozat, T., 2015](http://cs229.stanford.edu/proj2015/054_report.pdf).
"""
def __init__(self,
learning_rate=0.001,
beta_1=0.9,
beta_2=0.999,
epsilon=1e-7,
name='Nadam',
**kwargs):
"""Construct a new Nadam optimizer.
Args:
learning_rate: A Tensor or a floating point value. The learning rate.
beta_1: A float value or a constant float tensor. The exponential decay
rate for the 1st moment estimates.
beta_2: A float value or a constant float tensor. The exponential decay
rate for the exponentially weighted infinity norm.
epsilon: A small constant for numerical stability.
name: Optional name for the operations created when applying gradients.
        Defaults to "Nadam".
**kwargs: keyword arguments. Allowed to be {`clipnorm`, `clipvalue`, `lr`,
`decay`}. `clipnorm` is clip gradients by norm; `clipvalue` is clip
gradients by value, `decay` is included for backward compatibility to
allow time inverse decay of learning rate. `lr` is included for backward
compatibility, recommended to use `learning_rate` instead.
"""
# Backwards compatibility with keras NAdam optimizer.
kwargs['decay'] = kwargs.pop('schedule_decay', 0.004)
learning_rate = kwargs.get('lr', learning_rate)
if isinstance(learning_rate, learning_rate_schedule.LearningRateSchedule):
raise ValueError('The Nadam optimizer does not support '
'tf.keras.optimizers.LearningRateSchedules as the '
'learning rate.')
super(Nadam, self).__init__(name, **kwargs)
self._set_hyper('learning_rate', kwargs.get('lr', learning_rate))
self._set_hyper('decay', self._initial_decay)
self._set_hyper('beta_1', beta_1)
self._set_hyper('beta_2', beta_2)
self.epsilon = epsilon or backend_config.epsilon()
self._m_cache = None
def _create_slots(self, var_list):
var_dtype = var_list[0].dtype.base_dtype
if self._m_cache is None:
self._m_cache = self.add_weight(
'momentum_cache',
shape=[],
dtype=var_dtype,
initializer='ones',
trainable=False,
aggregation=tf_variables.VariableAggregation.ONLY_FIRST_REPLICA)
self._weights.append(self._m_cache)
# Separate for-loops to respect the ordering of slot variables from v1.
for var in var_list:
# Create slots for the first moments.
self.add_slot(var, 'm')
for var in var_list:
# Create slots for the second moments.
self.add_slot(var, 'v')
def _prepare_local(self, var_device, var_dtype, apply_state):
lr_t = array_ops.identity(self._get_hyper('learning_rate', var_dtype))
beta_1_t = array_ops.identity(self._get_hyper('beta_1', var_dtype))
beta_2_t = array_ops.identity(self._get_hyper('beta_2', var_dtype))
local_step = math_ops.cast(self.iterations + 1, var_dtype)
next_step = math_ops.cast(self.iterations + 2, var_dtype)
decay_base = math_ops.cast(0.96, var_dtype)
m_t = beta_1_t * (1. - 0.5 * (
math_ops.pow(decay_base, self._initial_decay * local_step)))
m_t_1 = beta_1_t * (1. - 0.5 * (
math_ops.pow(decay_base, self._initial_decay * next_step)))
m_schedule_new = math_ops.cast(self._m_cache_read, var_dtype) * m_t
if var_dtype is self._m_cache.dtype:
m_schedule_new = array_ops.identity(state_ops.assign(
self._m_cache, m_schedule_new, use_locking=self._use_locking))
m_schedule_next = m_schedule_new * m_t_1
apply_state[(var_device, var_dtype)] = dict(
lr_t=lr_t,
neg_lr_t=-lr_t,
epsilon=ops.convert_to_tensor(self.epsilon, var_dtype),
beta_1_t=beta_1_t,
beta_2_t=beta_2_t,
m_t=m_t,
m_t_1=m_t_1,
one_minus_beta_1_t=1 - beta_1_t,
one_minus_beta_2_t=1 - beta_2_t,
one_minus_m_t=1. - m_t,
one_minus_m_schedule_new=1. - m_schedule_new,
one_minus_m_schedule_next=1. - m_schedule_next,
v_t_prime_denominator=1. - math_ops.pow(beta_2_t, local_step),
)
def _prepare(self, var_list):
# Get the value of the momentum cache before starting to apply gradients.
self._m_cache_read = array_ops.identity(self._m_cache)
return super(Nadam, self)._prepare(var_list)
def _resource_apply_dense(self, grad, var, apply_state=None):
var_device, var_dtype = var.device, var.dtype.base_dtype
coefficients = ((apply_state or {}).get((var_device, var_dtype))
or self._fallback_apply_state(var_device, var_dtype))
m = self.get_slot(var, 'm')
v = self.get_slot(var, 'v')
g_prime = grad / coefficients['one_minus_m_schedule_new']
m_t = (coefficients['beta_1_t'] * m +
coefficients['one_minus_beta_1_t'] * grad)
m_t = state_ops.assign(m, m_t, use_locking=self._use_locking)
m_t_prime = m_t / coefficients['one_minus_m_schedule_next']
v_t = (coefficients['beta_2_t'] * v +
coefficients['one_minus_beta_2_t'] * math_ops.square(grad))
v_t = state_ops.assign(v, v_t, use_locking=self._use_locking)
v_t_prime = v_t / coefficients['v_t_prime_denominator']
m_t_bar = (coefficients['one_minus_m_t'] * g_prime +
coefficients['m_t_1'] * m_t_prime)
var_t = var - coefficients['lr_t'] * m_t_bar / (
math_ops.sqrt(v_t_prime) + coefficients['epsilon'])
return state_ops.assign(var, var_t, use_locking=self._use_locking).op
def _resource_apply_sparse(self, grad, var, indices, apply_state=None):
var_device, var_dtype = var.device, var.dtype.base_dtype
coefficients = ((apply_state or {}).get((var_device, var_dtype))
or self._fallback_apply_state(var_device, var_dtype))
m = self.get_slot(var, 'm')
v = self.get_slot(var, 'v')
g_prime = grad / coefficients['one_minus_m_schedule_new']
# m_t = beta1 * m + (1 - beta1) * g_t
m_scaled_g_values = grad * coefficients['one_minus_beta_1_t']
m_t = state_ops.assign(m, m * coefficients['beta_1_t'],
use_locking=self._use_locking)
with ops.control_dependencies([m_t]):
m_t = self._resource_scatter_add(m, indices, m_scaled_g_values)
m_t_slice = array_ops.gather(m_t, indices)
m_t_prime = m_t_slice / coefficients['one_minus_m_schedule_next']
m_t_bar = (coefficients['one_minus_m_t'] * g_prime +
coefficients['m_t_1'] * m_t_prime)
# v_t = beta2 * v + (1 - beta2) * (g_t * g_t)
v_scaled_g_values = (grad * grad) * coefficients['one_minus_beta_2_t']
v_t = state_ops.assign(v, v * coefficients['beta_2_t'],
use_locking=self._use_locking)
with ops.control_dependencies([v_t]):
v_t = self._resource_scatter_add(v, indices, v_scaled_g_values)
v_t_slice = array_ops.gather(v_t, indices)
v_t_prime = v_t_slice / coefficients['v_t_prime_denominator']
v_prime_sqrt_plus_eps = math_ops.sqrt(v_t_prime) + coefficients['epsilon']
var_update = self._resource_scatter_add(
var, indices,
coefficients['neg_lr_t'] * m_t_bar / v_prime_sqrt_plus_eps)
return control_flow_ops.group(*[var_update, m_t_bar, v_t])
def get_config(self):
config = super(Nadam, self).get_config()
config.update({
'learning_rate': self._serialize_hyperparameter('learning_rate'),
'decay': self._serialize_hyperparameter('decay'),
'beta_1': self._serialize_hyperparameter('beta_1'),
'beta_2': self._serialize_hyperparameter('beta_2'),
'epsilon': self.epsilon,
})
return config
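# Illustrative sketch (not part of the original file): a single eager-mode
# update with the optimizer defined above. The variable and gradient values
# are placeholders of our choosing.
def _example_nadam_step():
    var = tf_variables.Variable([1.0, 2.0])
    opt = Nadam(learning_rate=0.1)
    # Apply a constant gradient of ones to the variable once.
    opt.apply_gradients([(array_ops.ones_like(var), var)])
    return var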
| apache-2.0 | -8,133,389,988,194,216,000 | 5,497,889,916,451,802,000 | 41.113445 | 80 | 0.625162 | false |
harshilasu/GraphicMelon | y/google-cloud-sdk/platform/gsutil/third_party/boto/boto/cloudfront/identity.py | 170 | 4483 | # Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import uuid
class OriginAccessIdentity(object):
def __init__(self, connection=None, config=None, id='',
s3_user_id='', comment=''):
self.connection = connection
self.config = config
self.id = id
self.s3_user_id = s3_user_id
self.comment = comment
self.etag = None
def startElement(self, name, attrs, connection):
if name == 'CloudFrontOriginAccessIdentityConfig':
self.config = OriginAccessIdentityConfig()
return self.config
else:
return None
def endElement(self, name, value, connection):
if name == 'Id':
self.id = value
elif name == 'S3CanonicalUserId':
self.s3_user_id = value
elif name == 'Comment':
self.comment = value
else:
setattr(self, name, value)
def update(self, comment=None):
new_config = OriginAccessIdentityConfig(self.connection,
self.config.caller_reference,
self.config.comment)
if comment is not None:
new_config.comment = comment
self.etag = self.connection.set_origin_identity_config(self.id, self.etag, new_config)
self.config = new_config
def delete(self):
return self.connection.delete_origin_access_identity(self.id, self.etag)
def uri(self):
return 'origin-access-identity/cloudfront/%s' % self.id
class OriginAccessIdentityConfig(object):
def __init__(self, connection=None, caller_reference='', comment=''):
self.connection = connection
if caller_reference:
self.caller_reference = caller_reference
else:
self.caller_reference = str(uuid.uuid4())
self.comment = comment
def to_xml(self):
s = '<?xml version="1.0" encoding="UTF-8"?>\n'
s += '<CloudFrontOriginAccessIdentityConfig xmlns="http://cloudfront.amazonaws.com/doc/2009-09-09/">\n'
s += ' <CallerReference>%s</CallerReference>\n' % self.caller_reference
if self.comment:
s += ' <Comment>%s</Comment>\n' % self.comment
s += '</CloudFrontOriginAccessIdentityConfig>\n'
return s
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'Comment':
self.comment = value
elif name == 'CallerReference':
self.caller_reference = value
else:
setattr(self, name, value)
class OriginAccessIdentitySummary(object):
def __init__(self, connection=None, id='',
s3_user_id='', comment=''):
self.connection = connection
self.id = id
self.s3_user_id = s3_user_id
self.comment = comment
self.etag = None
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'Id':
self.id = value
elif name == 'S3CanonicalUserId':
self.s3_user_id = value
elif name == 'Comment':
self.comment = value
else:
setattr(self, name, value)
def get_origin_access_identity(self):
return self.connection.get_origin_access_identity_info(self.id)
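# Illustrative sketch (not part of the original module): the config object can
# be serialised without an AWS connection, which makes it easy to inspect the
# request body boto would send. The helper name _example_config_xml is ours.
def _example_config_xml():
    config = OriginAccessIdentityConfig(caller_reference='example-ref',
                                        comment='example comment')
    return config.to_xml()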
| gpl-3.0 | 4,473,563,330,743,607,300 | 8,476,908,219,702,533,000 | 36.049587 | 111 | 0.62592 | false |
mattjmcnaughton/kubernetes | cluster/juju/layers/kubernetes-e2e/reactive/kubernetes_e2e.py | 168 | 8148 | #!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from charms import layer
from charms.layer import snap
from charms.reactive import hook
from charms.reactive import is_state
from charms.reactive import set_state
from charms.reactive import when
from charms.reactive import when_not
from charms.reactive.helpers import data_changed
from charmhelpers.core import hookenv, unitdata
from shlex import split
from subprocess import check_call
from subprocess import check_output
db = unitdata.kv()
USER = 'system:e2e'
@hook('upgrade-charm')
def reset_delivery_states():
''' Remove the state set when resources are unpacked. '''
install_snaps()
@when('kubernetes-e2e.installed')
def report_status():
''' Report the status of the charm. '''
messaging()
def messaging():
''' Probe our relations to determine the proper messaging to the
end user '''
missing_services = []
if not is_state('kubernetes-master.available'):
missing_services.append('kubernetes-master:http')
if not is_state('certificates.available'):
missing_services.append('certificates')
if not is_state('kubeconfig.ready'):
missing_services.append('kubernetes-master:kube-control')
if missing_services:
if len(missing_services) > 1:
subject = 'relations'
else:
subject = 'relation'
services = ','.join(missing_services)
message = 'Missing {0}: {1}'.format(subject, services)
hookenv.status_set('blocked', message)
return
hookenv.status_set('active', 'Ready to test.')
@when('config.changed.channel')
def channel_changed():
install_snaps()
def install_snaps():
''' Deliver the e2e and kubectl components from the binary resource stream
packages declared in the charm '''
channel = hookenv.config('channel')
hookenv.status_set('maintenance', 'Installing kubectl snap')
snap.install('kubectl', channel=channel, classic=True)
hookenv.status_set('maintenance', 'Installing kubernetes-test snap')
snap.install('kubernetes-test', channel=channel, classic=True)
set_state('kubernetes-e2e.installed')
@when('tls_client.ca.saved', 'tls_client.client.certificate.saved',
'tls_client.client.key.saved', 'kubernetes-master.available',
'kubernetes-e2e.installed', 'e2e.auth.bootstrapped')
@when_not('kubeconfig.ready')
def prepare_kubeconfig_certificates(master):
''' Prepare the data to feed to create the kubeconfig file. '''
layer_options = layer.options('tls-client')
# Get all the paths to the tls information required for kubeconfig.
ca = layer_options.get('ca_certificate_path')
creds = db.get('credentials')
data_changed('kube-control.creds', creds)
servers = get_kube_api_servers(master)
# pedantry
kubeconfig_path = '/home/ubuntu/.kube/config'
# Create kubernetes configuration in the default location for ubuntu.
create_kubeconfig('/root/.kube/config', servers[0], ca,
token=creds['client_token'], user='root')
create_kubeconfig(kubeconfig_path, servers[0], ca,
token=creds['client_token'], user='ubuntu')
# Set permissions on the ubuntu users kubeconfig to ensure a consistent UX
cmd = ['chown', 'ubuntu:ubuntu', kubeconfig_path]
check_call(cmd)
messaging()
set_state('kubeconfig.ready')
@when('kube-control.connected')
def request_credentials(kube_control):
""" Request authorization creds."""
# Ask for a user, although we will be using the 'client_token'
kube_control.set_auth_request(USER)
@when('kube-control.auth.available')
def catch_change_in_creds(kube_control):
"""Request a service restart in case credential updates were detected."""
creds = kube_control.get_auth_credentials(USER)
if creds \
and data_changed('kube-control.creds', creds) \
and creds['user'] == USER:
# We need to cache the credentials here because if the
# master changes (master leader dies and replaced by a new one)
# the new master will have no recollection of our certs.
db.set('credentials', creds)
set_state('e2e.auth.bootstrapped')
@when('kubernetes-e2e.installed', 'kubeconfig.ready')
def set_app_version():
''' Declare the application version to juju '''
cmd = ['kubectl', 'version', '--client']
from subprocess import CalledProcessError
try:
version = check_output(cmd).decode('utf-8')
except CalledProcessError:
message = "Missing kubeconfig causes errors. Skipping version set."
hookenv.log(message)
return
git_version = version.split('GitVersion:"v')[-1]
version_from = git_version.split('",')[0]
hookenv.application_version_set(version_from.rstrip())
def create_kubeconfig(kubeconfig, server, ca, key=None, certificate=None,
user='ubuntu', context='juju-context',
cluster='juju-cluster', password=None, token=None):
'''Create a configuration for Kubernetes based on path using the supplied
arguments for values of the Kubernetes server, CA, key, certificate, user
context and cluster.'''
if not key and not certificate and not password and not token:
raise ValueError('Missing authentication mechanism.')
# token and password are mutually exclusive. Error early if both are
# present. The developer has requested an impossible situation.
# see: kubectl config set-credentials --help
if token and password:
raise ValueError('Token and Password are mutually exclusive.')
# Create the config file with the address of the master server.
cmd = 'kubectl config --kubeconfig={0} set-cluster {1} ' \
'--server={2} --certificate-authority={3} --embed-certs=true'
check_call(split(cmd.format(kubeconfig, cluster, server, ca)))
# Delete old users
cmd = 'kubectl config --kubeconfig={0} unset users'
check_call(split(cmd.format(kubeconfig)))
# Create the credentials using the client flags.
cmd = 'kubectl config --kubeconfig={0} ' \
'set-credentials {1} '.format(kubeconfig, user)
if key and certificate:
cmd = '{0} --client-key={1} --client-certificate={2} '\
'--embed-certs=true'.format(cmd, key, certificate)
if password:
cmd = "{0} --username={1} --password={2}".format(cmd, user, password)
# This is mutually exclusive from password. They will not work together.
if token:
cmd = "{0} --token={1}".format(cmd, token)
check_call(split(cmd))
# Create a default context with the cluster.
cmd = 'kubectl config --kubeconfig={0} set-context {1} ' \
'--cluster={2} --user={3}'
check_call(split(cmd.format(kubeconfig, context, cluster, user)))
# Make the config use this new context.
cmd = 'kubectl config --kubeconfig={0} use-context {1}'
check_call(split(cmd.format(kubeconfig, context)))
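# Illustrative only (our annotation, mirroring the call made in
# prepare_kubeconfig_certificates above); paths and values are placeholders:
#
#   create_kubeconfig('/home/ubuntu/.kube/config',
#                     'https://10.0.0.1:6443', '/path/to/ca.crt',
#                     token='example-token', user='ubuntu')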
def get_kube_api_servers(master):
'''Return the kubernetes api server address and port for this
relationship.'''
hosts = []
# Iterate over every service from the relation object.
for service in master.services():
for unit in service['hosts']:
hosts.append('https://{0}:{1}'.format(unit['hostname'],
unit['port']))
return hosts
def determine_arch():
''' dpkg wrapper to surface the architecture we are tied to'''
cmd = ['dpkg', '--print-architecture']
output = check_output(cmd).decode('utf-8')
return output.rstrip()
| apache-2.0 | -4,542,364,587,778,620,400 | 1,712,654,132,872,817,700 | 36.036364 | 78 | 0.67489 | false |
minorua/QGIS | tests/src/python/test_qgsvirtuallayerdefinition.py | 28 | 4276 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsVirtualLayerDefinition
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Hugo Mercier'
__date__ = '10/12/2015'
__copyright__ = 'Copyright 2015, The QGIS Project'
import qgis # NOQA
from qgis.core import (QgsField,
QgsWkbTypes,
QgsFields,
QgsVirtualLayerDefinition
)
from qgis.testing import unittest
from qgis.PyQt.QtCore import QVariant, QUrl
import os
def strToUrl(s):
return QUrl.fromEncoded(bytes(s, "utf8"))
class TestQgsVirtualLayerDefinition(unittest.TestCase):
def test1(self):
d = QgsVirtualLayerDefinition()
self.assertEqual(d.toString(), "")
d.setFilePath("/file")
self.assertEqual(d.toString(), "file:///file")
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).filePath(), "/file")
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).filePath(), "/file")
d.setFilePath(os.path.join('C:/', 'file'))
self.assertEqual(d.toString(), "file:///C:/file")
# self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).filePath(), os.path.join('C:/', 'file'))
d.setQuery("SELECT * FROM mytable")
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).query(), "SELECT * FROM mytable")
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).query(), "SELECT * FROM mytable")
q = "SELECT * FROM tableéé /*:int*/"
d.setQuery(q)
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).query(), q)
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).query(), q)
s1 = "file://foo&bar=okié"
d.addSource("name", s1, "provider", "utf8")
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).sourceLayers()[0].source(), s1)
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).sourceLayers()[0].source(), s1)
n1 = "éé ok"
d.addSource(n1, s1, "provider")
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).sourceLayers()[1].name(), n1)
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).sourceLayers()[1].name(), n1)
d.addSource("ref1", "id0001")
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).sourceLayers()[2].reference(), "id0001")
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).sourceLayers()[2].reference(), "id0001")
s = "dbname='C:\\tt' table=\"test\" (geometry) sql="
d.addSource("nn", s, "spatialite")
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).sourceLayers()[3].source(), s)
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).sourceLayers()[3].source(), s)
d.setGeometryField("geom")
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).geometryField(), "geom")
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).geometryField(), "geom")
d.setGeometryWkbType(QgsWkbTypes.Point)
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(d.toUrl()).geometryWkbType(), QgsWkbTypes.Point)
self.assertEqual(QgsVirtualLayerDefinition.fromUrl(strToUrl(d.toString())).geometryWkbType(), QgsWkbTypes.Point)
f = QgsFields()
f.append(QgsField("a", QVariant.Int))
f.append(QgsField("f", QVariant.Double))
f.append(QgsField("s", QVariant.String))
d.setFields(f)
f2 = QgsVirtualLayerDefinition.fromUrl(d.toUrl()).fields()
self.assertEqual(f[0].name(), f2[0].name())
self.assertEqual(f[0].type(), f2[0].type())
self.assertEqual(f[1].name(), f2[1].name())
self.assertEqual(f[1].type(), f2[1].type())
self.assertEqual(f[2].name(), f2[2].name())
self.assertEqual(f[2].type(), f2[2].type())
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | -1,977,687,202,182,951,400 | -191,782,732,759,721,570 | 44.924731 | 123 | 0.65933 | false |
hardanimal/UFT_UPGEM | Lib/site-packages/pip-1.2.1-py2.7.egg/pip/vcs/__init__.py | 19 | 8752 | """Handles all VCS (version control) support"""
import os
import shutil
from pip.backwardcompat import urlparse, urllib
from pip.log import logger
from pip.util import (display_path, backup_dir, find_command,
ask, rmtree, ask_path_exists)
__all__ = ['vcs', 'get_src_requirement']
class VcsSupport(object):
_registry = {}
schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
def __init__(self):
# Register more schemes with urlparse for various version control systems
urlparse.uses_netloc.extend(self.schemes)
# Python >= 2.7.4, 3.3 doesn't have uses_fragment
if getattr(urlparse, 'uses_fragment', None):
urlparse.uses_fragment.extend(self.schemes)
super(VcsSupport, self).__init__()
def __iter__(self):
return self._registry.__iter__()
@property
def backends(self):
return list(self._registry.values())
@property
def dirnames(self):
return [backend.dirname for backend in self.backends]
@property
def all_schemes(self):
schemes = []
for backend in self.backends:
schemes.extend(backend.schemes)
return schemes
def register(self, cls):
if not hasattr(cls, 'name'):
logger.warn('Cannot register VCS %s' % cls.__name__)
return
if cls.name not in self._registry:
self._registry[cls.name] = cls
def unregister(self, cls=None, name=None):
if name in self._registry:
del self._registry[name]
elif cls in self._registry.values():
del self._registry[cls.name]
else:
logger.warn('Cannot unregister because no class or name given')
def get_backend_name(self, location):
"""
Return the name of the version control backend if found at given
location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
"""
for vc_type in self._registry.values():
path = os.path.join(location, vc_type.dirname)
if os.path.exists(path):
return vc_type.name
return None
def get_backend(self, name):
name = name.lower()
if name in self._registry:
return self._registry[name]
def get_backend_from_location(self, location):
vc_type = self.get_backend_name(location)
if vc_type:
return self.get_backend(vc_type)
return None
vcs = VcsSupport()
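# Illustrative sketch (not part of the original module): backend classes such
# as pip's Subversion/Git/Mercurial/Bazaar support call vcs.register(cls) at
# import time; consumers then resolve a backend through the singleton:
#
#   backend_cls = vcs.get_backend('git')                        # by name
#   backend_cls = vcs.get_backend_from_location('/a/checkout')  # by dirname probe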
class VersionControl(object):
name = ''
dirname = ''
def __init__(self, url=None, *args, **kwargs):
self.url = url
self._cmd = None
super(VersionControl, self).__init__(*args, **kwargs)
def _filter(self, line):
return (logger.INFO, line)
def _is_local_repository(self, repo):
"""
posix absolute paths start with os.path.sep,
        win32 ones start with a drive letter (like c:\\folder)
"""
drive, tail = os.path.splitdrive(repo)
return repo.startswith(os.path.sep) or drive
@property
def cmd(self):
if self._cmd is not None:
return self._cmd
command = find_command(self.name)
logger.info('Found command %r at %r' % (self.name, command))
self._cmd = command
return command
def get_url_rev(self):
"""
Returns the correct repository URL and revision by parsing the given
repository URL
"""
        error_message = (
            "Sorry, '%s' is a malformed VCS url. "
            "The format is <vcs>+<protocol>://<url>, "
"e.g. svn+http://myrepo/svn/MyApp#egg=MyApp")
assert '+' in self.url, error_message % self.url
url = self.url.split('+', 1)[1]
scheme, netloc, path, query, frag = urlparse.urlsplit(url)
rev = None
if '@' in path:
path, rev = path.rsplit('@', 1)
url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
return url, rev
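    # Illustrative only (our annotation): with self.url set to
    # 'svn+http://myrepo/svn/MyApp@123', get_url_rev() returns
    # ('http://myrepo/svn/MyApp', '123'): the '<vcs>+' prefix is stripped and
    # the '@rev' suffix is split off the path.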
def get_info(self, location):
"""
Returns (url, revision), where both are strings
"""
assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
return self.get_url(location), self.get_revision(location)
def normalize_url(self, url):
"""
Normalize a URL for comparison by unquoting it and removing any trailing slash.
"""
return urllib.unquote(url).rstrip('/')
def compare_urls(self, url1, url2):
"""
Compare two repo URLs for identity, ignoring incidental differences.
"""
return (self.normalize_url(url1) == self.normalize_url(url2))
def parse_vcs_bundle_file(self, content):
"""
Takes the contents of the bundled text file that explains how to revert
the stripped off version control data of the given package and returns
the URL and revision of it.
"""
raise NotImplementedError
def obtain(self, dest):
"""
Called when installing or updating an editable package, takes the
source path of the checkout.
"""
raise NotImplementedError
def switch(self, dest, url, rev_options):
"""
Switch the repo at ``dest`` to point to ``URL``.
"""
        raise NotImplementedError
def update(self, dest, rev_options):
"""
Update an already-existing repo to the given ``rev_options``.
"""
raise NotImplementedError
def check_destination(self, dest, url, rev_options, rev_display):
"""
Prepare a location to receive a checkout/clone.
Return True if the location is ready for (and requires) a
checkout/clone, False otherwise.
"""
checkout = True
prompt = False
if os.path.exists(dest):
checkout = False
if os.path.exists(os.path.join(dest, self.dirname)):
existing_url = self.get_url(dest)
if self.compare_urls(existing_url, url):
logger.info('%s in %s exists, and has correct URL (%s)' %
(self.repo_name.title(), display_path(dest),
url))
logger.notify('Updating %s %s%s' %
(display_path(dest), self.repo_name,
rev_display))
self.update(dest, rev_options)
else:
logger.warn('%s %s in %s exists with URL %s' %
(self.name, self.repo_name,
display_path(dest), existing_url))
prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
('s', 'i', 'w', 'b'))
else:
logger.warn('Directory %s already exists, '
'and is not a %s %s.' %
(dest, self.name, self.repo_name))
prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
if prompt:
logger.warn('The plan is to install the %s repository %s' %
(self.name, url))
response = ask_path_exists('What to do? %s' % prompt[0],
prompt[1])
if response == 's':
logger.notify('Switching %s %s to %s%s' %
(self.repo_name, display_path(dest), url,
rev_display))
self.switch(dest, url, rev_options)
elif response == 'i':
# do nothing
pass
elif response == 'w':
logger.warn('Deleting %s' % display_path(dest))
rmtree(dest)
checkout = True
elif response == 'b':
dest_dir = backup_dir(dest)
logger.warn('Backing up %s to %s'
% (display_path(dest), dest_dir))
shutil.move(dest, dest_dir)
checkout = True
return checkout
def unpack(self, location):
if os.path.exists(location):
rmtree(location)
self.obtain(location)
def get_src_requirement(self, dist, location, find_tags=False):
raise NotImplementedError
def get_src_requirement(dist, location, find_tags):
version_control = vcs.get_backend_from_location(location)
if version_control:
return version_control().get_src_requirement(dist, location, find_tags)
logger.warn('cannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location)
return dist.as_requirement()
| gpl-3.0 | 5,602,229,932,982,742,000 | 7,171,685,954,807,643,000 | 33.868526 | 146 | 0.544561 | false |
pdav/khal | khal/__main__.py | 4 | 1179 | # Copyright (c) 2013-2021 khal contributors
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from khal.cli import main_khal
if __name__ == '__main__':
main_khal()
| mit | 3,896,981,356,162,833,000 | 7,236,668,734,725,094,000 | 46.16 | 72 | 0.765903 | false |
luogangyi/Ceilometer-oVirt | ceilometer/openstack/common/eventlet_backdoor.py | 5 | 4781 | # Copyright (c) 2012 OpenStack Foundation.
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import errno
import gc
import os
import pprint
import socket
import sys
import traceback
import eventlet
import eventlet.backdoor
import greenlet
from oslo.config import cfg
from ceilometer.openstack.common.gettextutils import _LI
from ceilometer.openstack.common import log as logging
help_for_backdoor_port = (
"Acceptable values are 0, <port>, and <start>:<end>, where 0 results "
"in listening on a random tcp port number; <port> results in listening "
"on the specified port number (and not enabling backdoor if that port "
"is in use); and <start>:<end> results in listening on the smallest "
"unused port number within the specified range of port numbers. The "
"chosen port is displayed in the service's log file.")
eventlet_backdoor_opts = [
cfg.StrOpt('backdoor_port',
help="Enable eventlet backdoor. %s" % help_for_backdoor_port)
]
CONF = cfg.CONF
CONF.register_opts(eventlet_backdoor_opts)
LOG = logging.getLogger(__name__)
class EventletBackdoorConfigValueError(Exception):
def __init__(self, port_range, help_msg, ex):
msg = ('Invalid backdoor_port configuration %(range)s: %(ex)s. '
'%(help)s' %
{'range': port_range, 'ex': ex, 'help': help_msg})
super(EventletBackdoorConfigValueError, self).__init__(msg)
self.port_range = port_range
def _dont_use_this():
print("Don't use this, just disconnect instead")
def _find_objects(t):
return [o for o in gc.get_objects() if isinstance(o, t)]
def _print_greenthreads():
for i, gt in enumerate(_find_objects(greenlet.greenlet)):
print(i, gt)
traceback.print_stack(gt.gr_frame)
print()
def _print_nativethreads():
for threadId, stack in sys._current_frames().items():
print(threadId)
traceback.print_stack(stack)
print()
def _parse_port_range(port_range):
if ':' not in port_range:
start, end = port_range, port_range
else:
start, end = port_range.split(':', 1)
try:
start, end = int(start), int(end)
if end < start:
raise ValueError
return start, end
except ValueError as ex:
raise EventletBackdoorConfigValueError(port_range, ex,
help_for_backdoor_port)
def _listen(host, start_port, end_port, listen_func):
try_port = start_port
while True:
try:
return listen_func((host, try_port))
except socket.error as exc:
if (exc.errno != errno.EADDRINUSE or
try_port >= end_port):
raise
try_port += 1
def initialize_if_enabled():
backdoor_locals = {
'exit': _dont_use_this, # So we don't exit the entire process
'quit': _dont_use_this, # So we don't exit the entire process
'fo': _find_objects,
'pgt': _print_greenthreads,
'pnt': _print_nativethreads,
}
if CONF.backdoor_port is None:
return None
start_port, end_port = _parse_port_range(str(CONF.backdoor_port))
# NOTE(johannes): The standard sys.displayhook will print the value of
# the last expression and set it to __builtin__._, which overwrites
# the __builtin__._ that gettext sets. Let's switch to using pprint
# since it won't interact poorly with gettext, and it's easier to
# read the output too.
def displayhook(val):
if val is not None:
pprint.pprint(val)
sys.displayhook = displayhook
sock = _listen('localhost', start_port, end_port, eventlet.listen)
# In the case of backdoor port being zero, a port number is assigned by
# listen(). In any case, pull the port number out here.
port = sock.getsockname()[1]
LOG.info(
_LI('Eventlet backdoor listening on %(port)s for process %(pid)d') %
{'port': port, 'pid': os.getpid()}
)
eventlet.spawn_n(eventlet.backdoor.backdoor_server, sock,
locals=backdoor_locals)
return port
| apache-2.0 | 5,576,735,345,220,779,000 | 2,234,271,686,696,006,000 | 31.972414 | 78 | 0.646936 | false |
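A minimal usage sketch for the port-range grammar described in help_for_backdoor_port above. The import path is an assumption (it mirrors this module's location in the ceilometer tree), and the helpers are module-private, so this is illustrative only:

from ceilometer.openstack.common import eventlet_backdoor as bd

print(bd._parse_port_range('0'))          # (0, 0): listen on a random port
print(bd._parse_port_range('4444'))       # (4444, 4444): fixed port
print(bd._parse_port_range('8000:8010'))  # (8000, 8010): first free port in range
# A reversed range such as '9000:8000' raises EventletBackdoorConfigValueError,
# via the ValueError check in _parse_port_range().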
andymckay/addons-server | src/olympia/blocklist/views.py | 2 | 7809 | import base64
import collections
import hashlib
from datetime import datetime
from operator import attrgetter
import time
from django.core.cache import cache
from django.db.models import Q, signals as db_signals
from django.db.transaction import non_atomic_requests
from django.http import JsonResponse
from django.shortcuts import get_object_or_404, render
from django.utils.cache import patch_cache_control
from django.utils.encoding import smart_str
from olympia.amo.utils import sorted_groupby
from olympia.amo.tasks import flush_front_end_cache_urls
from olympia.versions.compare import version_int
from .models import (
BlocklistApp, BlocklistCA, BlocklistDetail, BlocklistGfx, BlocklistItem,
BlocklistIssuerCert, BlocklistPlugin)
from .utils import (
certificates_to_json, addons_to_json, plugins_to_json, gfxs_to_json)
App = collections.namedtuple('App', 'guid min max')
BlItem = collections.namedtuple('BlItem', 'rows os modified block_id prefs')
@non_atomic_requests
def blocklist(request, apiver, app, appver):
key = 'blocklist:%s:%s:%s' % (apiver, app, appver)
# Use md5 to make sure the memcached key is clean.
key = hashlib.md5(smart_str(key)).hexdigest()
cache.add('blocklist:keyversion', 1)
version = cache.get('blocklist:keyversion')
response = cache.get(key, version=version)
if response is None:
response = _blocklist(request, apiver, app, appver)
cache.set(key, response, 60 * 60, version=version)
patch_cache_control(response, max_age=60 * 60)
return response
def _blocklist(request, apiver, app, appver):
apiver = int(apiver)
items = get_items(apiver, app, appver)[0]
plugins = get_plugins(apiver, app, appver)
gfxs = BlocklistGfx.objects.filter(Q(guid__isnull=True) | Q(guid=app))
issuerCertBlocks = BlocklistIssuerCert.objects.all()
cas = None
try:
cas = BlocklistCA.objects.all()[0]
# base64encode does not allow str as argument
cas = base64.b64encode(cas.data.encode('utf-8'))
except IndexError:
pass
# Find the latest created/modified date across all sections.
all_ = list(items.values()) + list(plugins) + list(gfxs)
last_update = max(x.modified for x in all_) if all_ else datetime.now()
# The client expects milliseconds, Python's time returns seconds.
last_update = int(time.mktime(last_update.timetuple()) * 1000)
data = dict(items=items, plugins=plugins, gfxs=gfxs, apiver=apiver,
appguid=app, appver=appver, last_update=last_update, cas=cas,
issuerCertBlocks=issuerCertBlocks)
return render(request, 'blocklist/blocklist.xml', data,
content_type='text/xml')
def clear_blocklist(*args, **kw):
# Something in the blocklist changed; invalidate all responses.
cache.add('blocklist:keyversion', 1)
cache.incr('blocklist:keyversion')
flush_front_end_cache_urls.delay(['/blocklist/*'])
for m in (BlocklistItem, BlocklistPlugin, BlocklistGfx, BlocklistApp,
BlocklistCA, BlocklistDetail, BlocklistIssuerCert):
db_signals.post_save.connect(clear_blocklist, sender=m,
dispatch_uid='save_%s' % m)
db_signals.post_delete.connect(clear_blocklist, sender=m,
dispatch_uid='delete_%s' % m)
def get_items(apiver=None, app=None, appver=None, groupby='guid'):
# Collapse multiple blocklist items (different version ranges) into one
# item and collapse each item's apps.
if app:
app_query = Q(app__guid__isnull=True) | Q(app__guid=app)
else:
        # This is useful to make the LEFT OUTER JOIN with blapps that is
        # then used in the extra clause.
app_query = Q(app__isnull=True) | Q(app__isnull=False)
addons = (BlocklistItem.objects.no_cache()
.select_related('details')
.prefetch_related('prefs')
.filter(app_query)
.order_by('-modified')
.extra(select={'app_guid': 'blapps.guid',
'app_min': 'blapps.min',
'app_max': 'blapps.max'}))
items, details = {}, {}
for guid, rows in sorted_groupby(addons, groupby):
rows = list(rows)
rr = []
prefs = []
for id, rs in sorted_groupby(rows, 'id'):
rs = list(rs)
rr.append(rs[0])
prefs.extend(p.pref for p in rs[0].prefs.all())
rs[0].apps = [App(r.app_guid, r.app_min, r.app_max)
for r in rs if r.app_guid]
os = [r.os for r in rr if r.os]
items[guid] = BlItem(rr, os[0] if os else None, rows[0].modified,
rows[0].block_id, prefs)
details[guid] = sorted(rows, key=attrgetter('id'))[0]
return items, details
def get_plugins(apiver=3, app=None, appver=None):
# API versions < 3 ignore targetApplication entries for plugins so only
# block the plugin if the appver is within the block range.
if app:
app_query = (Q(app__isnull=True) |
Q(app__guid=app) |
Q(app__guid__isnull=True))
else:
app_query = Q(app__isnull=True) | Q(app__isnull=False)
plugins = (BlocklistPlugin.objects.no_cache().select_related('details')
.filter(app_query)
.extra(select={'app_guid': 'blapps.guid',
'app_min': 'blapps.min',
'app_max': 'blapps.max'}))
if apiver < 3 and appver is not None:
def between(ver, min, max):
if not (min and max):
return True
return version_int(min) < ver < version_int(max)
app_version = version_int(appver)
plugins = [p for p in plugins if between(app_version, p.app_min,
p.app_max)]
return list(plugins)
@non_atomic_requests
def blocklist_json(request):
key = 'blocklist:json'
cache.add('blocklist:keyversion', 1)
version = cache.get('blocklist:keyversion')
response = cache.get(key, version=version)
if response is None:
response = _blocklist_json(request)
cache.set(key, response, 60 * 60, version=version)
patch_cache_control(response, max_age=60 * 60)
return response
def _blocklist_json(request):
"""Export the whole blocklist in JSON.
It will select blocklists for all apps.
"""
items, _ = get_items(groupby='id')
plugins = get_plugins()
issuerCertBlocks = BlocklistIssuerCert.objects.all()
gfxs = BlocklistGfx.objects.all()
ca = None
try:
ca = BlocklistCA.objects.all()[0]
# base64encode does not allow str as argument
ca = base64.b64encode(ca.data.encode('utf-8'))
except IndexError:
pass
last_update = int(round(time.time() * 1000))
results = {
'last_update': last_update,
'certificates': certificates_to_json(issuerCertBlocks),
'addons': addons_to_json(items),
'plugins': plugins_to_json(plugins),
'gfx': gfxs_to_json(gfxs),
'ca': ca,
}
return JsonResponse(results)
@non_atomic_requests
def blocked_list(request, apiver=3):
app = request.APP.guid
objs = get_items(apiver, app)[1].values() + get_plugins(apiver, app)
items = sorted(objs, key=attrgetter('created'), reverse=True)
return render(request, 'blocklist/blocked_list.html', {'items': items})
# The id is prefixed with [ip] so we know which model to use.
@non_atomic_requests
def blocked_detail(request, id):
bltypes = dict((m._type, m) for m in (BlocklistItem, BlocklistPlugin))
item = get_object_or_404(bltypes[id[0]], details=id[1:])
return render(request, 'blocklist/blocked_detail.html', {'item': item})
| bsd-3-clause | 555,808,668,748,443,460 | -8,244,902,696,655,185,000 | 35.661972 | 77 | 0.629914 | false |
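blocklist() and clear_blocklist() above invalidate every cached response at once by bumping a shared version number instead of deleting keys one by one. A self-contained sketch of that pattern, with a plain dict standing in for Django's cache (the class and method names here are ours):

class VersionedCache(object):
    # Stand-in for Django's cache, demonstrating version-keyed invalidation.
    def __init__(self):
        self._store = {}          # maps (key, version) -> value
        self._keyversion = 1
    def get(self, key):
        return self._store.get((key, self._keyversion))
    def set(self, key, value):
        self._store[(key, self._keyversion)] = value
    def invalidate_all(self):
        # Equivalent to cache.incr('blocklist:keyversion'): old entries
        # simply become unreachable without individual deletes.
        self._keyversion += 1

cache = VersionedCache()
cache.set('blocklist:3:firefox:38.0', '<blocklist/>')
assert cache.get('blocklist:3:firefox:38.0') == '<blocklist/>'
cache.invalidate_all()
assert cache.get('blocklist:3:firefox:38.0') is None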
codeworldprodigy/lab4 | lib/flask/flask/sessions.py | 348 | 12882 | # -*- coding: utf-8 -*-
"""
flask.sessions
~~~~~~~~~~~~~~
Implements cookie based sessions based on itsdangerous.
:copyright: (c) 2012 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import uuid
import hashlib
from datetime import datetime
from werkzeug.http import http_date, parse_date
from werkzeug.datastructures import CallbackDict
from . import Markup, json
from ._compat import iteritems, text_type
from itsdangerous import URLSafeTimedSerializer, BadSignature
def total_seconds(td):
return td.days * 60 * 60 * 24 + td.seconds
class SessionMixin(object):
"""Expands a basic dictionary with an accessors that are expected
by Flask extensions and users for the session.
"""
def _get_permanent(self):
return self.get('_permanent', False)
def _set_permanent(self, value):
self['_permanent'] = bool(value)
#: this reflects the ``'_permanent'`` key in the dict.
permanent = property(_get_permanent, _set_permanent)
del _get_permanent, _set_permanent
#: some session backends can tell you if a session is new, but that is
#: not necessarily guaranteed. Use with caution. The default mixin
#: implementation just hardcodes `False` in.
new = False
#: for some backends this will always be `True`, but some backends will
#: default this to false and detect changes in the dictionary for as
#: long as changes do not happen on mutable structures in the session.
#: The default mixin implementation just hardcodes `True` in.
modified = True
class TaggedJSONSerializer(object):
"""A customized JSON serializer that supports a few extra types that
we take for granted when serializing (tuples, markup objects, datetime).
"""
def dumps(self, value):
def _tag(value):
if isinstance(value, tuple):
return {' t': [_tag(x) for x in value]}
elif isinstance(value, uuid.UUID):
return {' u': value.hex}
elif callable(getattr(value, '__html__', None)):
return {' m': text_type(value.__html__())}
elif isinstance(value, list):
return [_tag(x) for x in value]
elif isinstance(value, datetime):
return {' d': http_date(value)}
elif isinstance(value, dict):
return dict((k, _tag(v)) for k, v in iteritems(value))
elif isinstance(value, str):
try:
return text_type(value)
except UnicodeError:
raise UnexpectedUnicodeError(u'A byte string with '
u'non-ASCII data was passed to the session system '
u'which can only store unicode strings. Consider '
u'base64 encoding your string (String was %r)' % value)
return value
return json.dumps(_tag(value), separators=(',', ':'))
def loads(self, value):
def object_hook(obj):
if len(obj) != 1:
return obj
the_key, the_value = next(iteritems(obj))
if the_key == ' t':
return tuple(the_value)
elif the_key == ' u':
return uuid.UUID(the_value)
elif the_key == ' m':
return Markup(the_value)
elif the_key == ' d':
return parse_date(the_value)
return obj
return json.loads(value, object_hook=object_hook)
session_json_serializer = TaggedJSONSerializer()
class SecureCookieSession(CallbackDict, SessionMixin):
"""Baseclass for sessions based on signed cookies."""
def __init__(self, initial=None):
def on_update(self):
self.modified = True
CallbackDict.__init__(self, initial, on_update)
self.modified = False
class NullSession(SecureCookieSession):
"""Class used to generate nicer error messages if sessions are not
available. Will still allow read-only access to the empty session
but fail on setting.
"""
def _fail(self, *args, **kwargs):
raise RuntimeError('the session is unavailable because no secret '
'key was set. Set the secret_key on the '
'application to something unique and secret.')
__setitem__ = __delitem__ = clear = pop = popitem = \
update = setdefault = _fail
del _fail
class SessionInterface(object):
"""The basic interface you have to implement in order to replace the
default session interface which uses werkzeug's securecookie
implementation. The only methods you have to implement are
:meth:`open_session` and :meth:`save_session`, the others have
useful defaults which you don't need to change.
The session object returned by the :meth:`open_session` method has to
provide a dictionary like interface plus the properties and methods
from the :class:`SessionMixin`. We recommend just subclassing a dict
and adding that mixin::
class Session(dict, SessionMixin):
pass
If :meth:`open_session` returns `None` Flask will call into
:meth:`make_null_session` to create a session that acts as replacement
if the session support cannot work because some requirement is not
fulfilled. The default :class:`NullSession` class that is created
will complain that the secret key was not set.
To replace the session interface on an application all you have to do
is to assign :attr:`flask.Flask.session_interface`::
app = Flask(__name__)
app.session_interface = MySessionInterface()
.. versionadded:: 0.8
"""
#: :meth:`make_null_session` will look here for the class that should
#: be created when a null session is requested. Likewise the
#: :meth:`is_null_session` method will perform a typecheck against
#: this type.
null_session_class = NullSession
#: A flag that indicates if the session interface is pickle based.
#: This can be used by flask extensions to make a decision in regards
#: to how to deal with the session object.
#:
#: .. versionadded:: 0.10
pickle_based = False
def make_null_session(self, app):
"""Creates a null session which acts as a replacement object if the
real session support could not be loaded due to a configuration
error. This mainly aids the user experience because the job of the
null session is to still support lookup without complaining but
modifications are answered with a helpful error message of what
failed.
This creates an instance of :attr:`null_session_class` by default.
"""
return self.null_session_class()
def is_null_session(self, obj):
"""Checks if a given object is a null session. Null sessions are
not asked to be saved.
This checks if the object is an instance of :attr:`null_session_class`
by default.
"""
return isinstance(obj, self.null_session_class)
def get_cookie_domain(self, app):
"""Helpful helper method that returns the cookie domain that should
be used for the session cookie if session cookies are used.
"""
if app.config['SESSION_COOKIE_DOMAIN'] is not None:
return app.config['SESSION_COOKIE_DOMAIN']
if app.config['SERVER_NAME'] is not None:
# chop of the port which is usually not supported by browsers
rv = '.' + app.config['SERVER_NAME'].rsplit(':', 1)[0]
            # Google Chrome does not like cookies set to .localhost, so
            # we just go with no domain then. Flask documents anyway that
            # cross-domain cookies need a fully qualified domain name
if rv == '.localhost':
rv = None
# If we infer the cookie domain from the server name we need
# to check if we are in a subpath. In that case we can't
# set a cross domain cookie.
if rv is not None:
path = self.get_cookie_path(app)
if path != '/':
rv = rv.lstrip('.')
return rv
def get_cookie_path(self, app):
"""Returns the path for which the cookie should be valid. The
        default implementation uses the value from the ``SESSION_COOKIE_PATH``
config var if it's set, and falls back to ``APPLICATION_ROOT`` or
uses ``/`` if it's `None`.
"""
return app.config['SESSION_COOKIE_PATH'] or \
app.config['APPLICATION_ROOT'] or '/'
def get_cookie_httponly(self, app):
"""Returns True if the session cookie should be httponly. This
currently just returns the value of the ``SESSION_COOKIE_HTTPONLY``
config var.
"""
return app.config['SESSION_COOKIE_HTTPONLY']
def get_cookie_secure(self, app):
"""Returns True if the cookie should be secure. This currently
just returns the value of the ``SESSION_COOKIE_SECURE`` setting.
"""
return app.config['SESSION_COOKIE_SECURE']
def get_expiration_time(self, app, session):
"""A helper method that returns an expiration date for the session
or `None` if the session is linked to the browser session. The
default implementation returns now + the permanent session
lifetime configured on the application.
"""
if session.permanent:
return datetime.utcnow() + app.permanent_session_lifetime
def open_session(self, app, request):
"""This method has to be implemented and must either return `None`
in case the loading failed because of a configuration error or an
instance of a session object which implements a dictionary like
interface + the methods and attributes on :class:`SessionMixin`.
"""
raise NotImplementedError()
def save_session(self, app, session, response):
"""This is called for actual sessions returned by :meth:`open_session`
at the end of the request. This is still called during a request
context so if you absolutely need access to the request you can do
that.
"""
raise NotImplementedError()
class SecureCookieSessionInterface(SessionInterface):
"""The default session interface that stores sessions in signed cookies
through the :mod:`itsdangerous` module.
"""
#: the salt that should be applied on top of the secret key for the
#: signing of cookie based sessions.
salt = 'cookie-session'
#: the hash function to use for the signature. The default is sha1
digest_method = staticmethod(hashlib.sha1)
#: the name of the itsdangerous supported key derivation. The default
#: is hmac.
key_derivation = 'hmac'
#: A python serializer for the payload. The default is a compact
#: JSON derived serializer with support for some extra Python types
#: such as datetime objects or tuples.
serializer = session_json_serializer
session_class = SecureCookieSession
def get_signing_serializer(self, app):
if not app.secret_key:
return None
signer_kwargs = dict(
key_derivation=self.key_derivation,
digest_method=self.digest_method
)
return URLSafeTimedSerializer(app.secret_key, salt=self.salt,
serializer=self.serializer,
signer_kwargs=signer_kwargs)
def open_session(self, app, request):
s = self.get_signing_serializer(app)
if s is None:
return None
val = request.cookies.get(app.session_cookie_name)
if not val:
return self.session_class()
max_age = total_seconds(app.permanent_session_lifetime)
try:
data = s.loads(val, max_age=max_age)
return self.session_class(data)
except BadSignature:
return self.session_class()
def save_session(self, app, session, response):
domain = self.get_cookie_domain(app)
path = self.get_cookie_path(app)
if not session:
if session.modified:
response.delete_cookie(app.session_cookie_name,
domain=domain, path=path)
return
httponly = self.get_cookie_httponly(app)
secure = self.get_cookie_secure(app)
expires = self.get_expiration_time(app, session)
val = self.get_signing_serializer(app).dumps(dict(session))
response.set_cookie(app.session_cookie_name, val,
expires=expires, httponly=httponly,
domain=domain, path=path, secure=secure)
from flask.debughelpers import UnexpectedUnicodeError
| apache-2.0 | 7,455,630,736,216,349,000 | -2,064,299,357,913,672,200 | 38.394495 | 79 | 0.626533 | false |
WASPACDC/hmdsm.repository | plugin.video.loganaddon/Images/resources/lib/sources/dizibox_tv.py | 20 | 6432 | # -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse
from resources.lib.libraries import cleantitle
from resources.lib.libraries import client
from resources.lib.libraries import cache
from resources.lib import resolvers
class source:
def __init__(self):
self.base_link = 'http://www.dizibox.com'
def dizibox_shows(self):
try:
result = client.source(self.base_link)
result = client.parseDOM(result, 'input', {'id': 'filterAllCategories'})[0]
result = client.parseDOM(result, 'li')
result = zip(client.parseDOM(result, 'a', ret='href'), client.parseDOM(result, 'a'))
result = [(re.sub('http.+?//.+?/','/', i[0]), cleantitle.tv(i[1])) for i in result]
return result
except:
return
def get_show(self, imdb, tvdb, tvshowtitle, year):
try:
result = cache.get(self.dizibox_shows, 72)
tvshowtitle = cleantitle.tv(tvshowtitle)
result = [i[0] for i in result if tvshowtitle == i[1]][0]
try: url = re.compile('//.+?(/.+)').findall(result)[0]
except: url = result
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def get_episode(self, url, imdb, tvdb, title, date, season, episode):
try:
if url == None: return
url = urlparse.urljoin(self.base_link, url)
season, episode = '%01d' % int(season), '%01d' % int(episode)
result = client.source(url)
if not season == '1':
url = client.parseDOM(result, 'a', ret='href', attrs = {'class': 'season-.+?'})
url = [i for i in url if '/%s-sezon-' % season in i][0]
result = client.source(url)
result = client.parseDOM(result, 'a', ret='href')
result = [i for i in result if '%s-sezon-%s-bolum-' % (season, episode) in i][0]
try: url = re.compile('//.+?(/.+)').findall(result)[0]
except: url = result
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def get_sources(self, url, hosthdDict, hostDict, locDict):
try:
sources = []
if url == None: return sources
sources_url = urlparse.urljoin(self.base_link, url)
result = client.source(sources_url, close=False)
result = re.sub(r'[^\x00-\x7F]+','', result)
result = re.compile('(<option.*?</option>)', re.DOTALL).findall(result)
result = [(client.parseDOM(i, 'option', ret='href'), client.parseDOM(i, 'option', ret='value'), client.parseDOM(i, 'option')) for i in result]
            # 'Altyazsz' is Turkish "Altyazısız" (unsubtitled) after the
            # non-ASCII strip above removes the dotless-i characters.
            result = [i[0] + i[1] for i in result if len(i[2]) > 0 and i[2][0] == 'Altyazsz'][0][0]
url = urlparse.urljoin(self.base_link, result)
result = client.source(url, close=False)
url = client.parseDOM(result, 'span', attrs = {'class': 'object-wrapper'})[0]
url = client.parseDOM(url, 'iframe', ret='src')[0]
url = client.replaceHTMLCodes(url)
result = client.source(url, close=False)
try:
r = re.compile('"?file"?\s*:\s*"([^"]+)"\s*,\s*"?label"?\s*:\s*"(\d+)p?"').findall(result)
if r == []: raise Exception()
r = [(i[0].replace('\\/', '/').replace('\\&', '&').decode('unicode_escape'), int(i[1])) for i in r]
u = [('%s|User-Agent=%s&Referer=%s' % (i[0], urllib.quote_plus(client.agent()), urllib.quote_plus(sources_url)), i[1], 'Dizibox') for i in r if not 'google' in i[0]]
u += [(i[0], i[1], 'GVideo') for i in r if 'google' in i[0]]
try: sources.append({'source': [i[2] for i in u if i[1] >= 1080][0], 'quality': '1080p', 'provider': 'Dizibox', 'url': [i[0] for i in u if i[1] >= 1080][0]})
except: pass
try: sources.append({'source': [i[2] for i in u if 720 <= i[1] < 1080][0], 'quality': 'HD', 'provider': 'Dizibox', 'url': [i[0] for i in u if 720 <= i[1] < 1080][0]})
except: pass
try: sources.append({'source': [i[2] for i in u if i[1] < 720][0], 'quality': 'SD', 'provider': 'Dizibox', 'url': [i[0] for i in u if i[1] < 720][0]})
except: pass
return sources
except:
pass
try:
if '.dizibox.' in url: url = re.compile('location\.href\s*=\s*"(.+?)"').findall(result)[0]
host = urlparse.urlparse(url).netloc
host = host.replace('mail.ru', 'mailru.ru').rsplit('.', 1)[0].split('.')[-1].lower()
strm = resolvers.request(url)
if strm == url or strm == None: raise Exception()
if type(strm) == list:
for i in strm: sources.append({'source': host, 'quality': i['quality'], 'provider': 'Dizibox', 'url': i['url']})
else:
sources.append({'source': host, 'quality': 'HD', 'provider': 'Dizibox', 'url': strm})
return sources
except:
pass
except:
return sources
def resolve(self, url):
try:
if not 'google' in url: return url
if url.startswith('stack://'): return url
url = client.request(url, output='geturl')
if 'requiressl=yes' in url: url = url.replace('http://', 'https://')
else: url = url.replace('https://', 'http://')
return url
except:
return
| gpl-2.0 | 2,600,971,416,539,020,000 | 2,274,789,360,478,045,200 | 36.614035 | 182 | 0.532027 | false |
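get_sources() above buckets stream heights into the three quality labels this scraper emits. The mapping in isolation (the function name is ours, not the add-on's):

def label_quality(height):
    # Mirrors the 1080p / HD / SD thresholds used in get_sources() above.
    if height >= 1080:
        return '1080p'
    elif 720 <= height < 1080:
        return 'HD'
    return 'SD'

assert label_quality(1080) == '1080p'
assert label_quality(720) == 'HD'
assert label_quality(480) == 'SD'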
taaviteska/django | django/contrib/auth/middleware.py | 82 | 5399 | from django.conf import settings
from django.contrib import auth
from django.contrib.auth import load_backend
from django.contrib.auth.backends import RemoteUserBackend
from django.core.exceptions import ImproperlyConfigured
from django.utils.deprecation import MiddlewareMixin
from django.utils.functional import SimpleLazyObject
def get_user(request):
if not hasattr(request, '_cached_user'):
request._cached_user = auth.get_user(request)
return request._cached_user
class AuthenticationMiddleware(MiddlewareMixin):
def process_request(self, request):
assert hasattr(request, 'session'), (
"The Django authentication middleware requires session middleware "
"to be installed. Edit your MIDDLEWARE%s setting to insert "
"'django.contrib.sessions.middleware.SessionMiddleware' before "
"'django.contrib.auth.middleware.AuthenticationMiddleware'."
) % ("_CLASSES" if settings.MIDDLEWARE is None else "")
request.user = SimpleLazyObject(lambda: get_user(request))
class RemoteUserMiddleware(MiddlewareMixin):
"""
Middleware for utilizing Web-server-provided authentication.
If request.user is not authenticated, then this middleware attempts to
authenticate the username passed in the ``REMOTE_USER`` request header.
If authentication is successful, the user is automatically logged in to
persist the user in the session.
The header used is configurable and defaults to ``REMOTE_USER``. Subclass
this class and change the ``header`` attribute if you need to use a
different header.
"""
# Name of request header to grab username from. This will be the key as
# used in the request.META dictionary, i.e. the normalization of headers to
# all uppercase and the addition of "HTTP_" prefix apply.
header = "REMOTE_USER"
force_logout_if_no_header = True
def process_request(self, request):
# AuthenticationMiddleware is required so that request.user exists.
if not hasattr(request, 'user'):
raise ImproperlyConfigured(
"The Django remote user auth middleware requires the"
" authentication middleware to be installed. Edit your"
" MIDDLEWARE setting to insert"
" 'django.contrib.auth.middleware.AuthenticationMiddleware'"
" before the RemoteUserMiddleware class.")
try:
username = request.META[self.header]
except KeyError:
# If specified header doesn't exist then remove any existing
# authenticated remote-user, or return (leaving request.user set to
# AnonymousUser by the AuthenticationMiddleware).
if self.force_logout_if_no_header and request.user.is_authenticated:
self._remove_invalid_user(request)
return
# If the user is already authenticated and that user is the user we are
# getting passed in the headers, then the correct user is already
# persisted in the session and we don't need to continue.
if request.user.is_authenticated:
if request.user.get_username() == self.clean_username(username, request):
return
else:
# An authenticated user is associated with the request, but
# it does not match the authorized user in the header.
self._remove_invalid_user(request)
# We are seeing this user for the first time in this session, attempt
# to authenticate the user.
user = auth.authenticate(request, remote_user=username)
if user:
# User is valid. Set request.user and persist user in the session
# by logging the user in.
request.user = user
auth.login(request, user)
def clean_username(self, username, request):
"""
Allow the backend to clean the username, if the backend defines a
clean_username method.
"""
backend_str = request.session[auth.BACKEND_SESSION_KEY]
backend = auth.load_backend(backend_str)
try:
username = backend.clean_username(username)
except AttributeError: # Backend has no clean_username method.
pass
return username
def _remove_invalid_user(self, request):
"""
Remove the current authenticated user in the request which is invalid
but only if the user is authenticated via the RemoteUserBackend.
"""
try:
stored_backend = load_backend(request.session.get(auth.BACKEND_SESSION_KEY, ''))
except ImportError:
# backend failed to load
auth.logout(request)
else:
if isinstance(stored_backend, RemoteUserBackend):
auth.logout(request)
class PersistentRemoteUserMiddleware(RemoteUserMiddleware):
"""
Middleware for Web-server provided authentication on logon pages.
Like RemoteUserMiddleware but keeps the user authenticated even if
the header (``REMOTE_USER``) is not found in the request. Useful
for setups when the external authentication via ``REMOTE_USER``
is only expected to happen on some "logon" URL and the rest of
the application wants to use Django's authentication mechanism.
"""
force_logout_if_no_header = False
| bsd-3-clause | -7,658,355,632,177,686,000 | 2,788,984,769,745,781,000 | 42.894309 | 92 | 0.670865 | false |
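A hedged configuration sketch for the middleware above: process_request() insists that AuthenticationMiddleware (and therefore SessionMiddleware) run first, and the companion RemoteUserBackend must be enabled for authenticate() to succeed. The exact setting name depends on the Django version (MIDDLEWARE vs. MIDDLEWARE_CLASSES):

# settings.py sketch; ordering matters, as enforced by process_request().
MIDDLEWARE = [
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.RemoteUserMiddleware',
]
AUTHENTICATION_BACKENDS = [
    'django.contrib.auth.backends.RemoteUserBackend',
]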
davidmueller13/xbmc | lib/gtest/test/gtest_xml_output_unittest.py | 356 | 13525 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module"""
__author__ = '[email protected] (Sean Mcafee)'
import datetime
import errno
import os
import re
import sys
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
GTEST_LIST_TESTS_FLAG = '--gtest_list_tests'
GTEST_OUTPUT_FLAG = "--gtest_output"
GTEST_DEFAULT_OUTPUT_FILE = "test_detail.xml"
GTEST_PROGRAM_NAME = "gtest_xml_output_unittest_"
SUPPORTS_STACK_TRACES = False
if SUPPORTS_STACK_TRACES:
STACK_TRACE_TEMPLATE = '\nStack trace:\n*'
else:
STACK_TRACE_TEMPLATE = ''
EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="23" failures="4" disabled="2" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
<testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="Fails" status="run" time="*" classname="FailedTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="MixedResultTest"/>
<testcase name="Fails" status="run" time="*" classname="MixedResultTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2%(stack)s]]></failure>
</testcase>
<testcase name="DISABLED_test" status="notrun" time="*" classname="MixedResultTest"/>
</testsuite>
<testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="OutputsCData" status="run" time="*" classname="XmlQuotingTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]></top>" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]><![CDATA[</top>%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="InvalidCharactersInMessage" status="run" time="*" classname="InvalidCharactersTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*">
<testcase name="DISABLED_test_not_run" status="notrun" time="*" classname="DisabledTest"/>
</testsuite>
<testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="OneProperty" status="run" time="*" classname="PropertyRecordingTest" key_1="1"/>
<testcase name="IntValuedProperty" status="run" time="*" classname="PropertyRecordingTest" key_int="1"/>
<testcase name="ThreeProperties" status="run" time="*" classname="PropertyRecordingTest" key_1="1" key_2="2" key_3="3"/>
<testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest" key_1="2"/>
</testsuite>
<testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*">
<testcase name="RecordProperty" status="run" time="*" classname="NoFixtureTest" key="1"/>
<testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_int="1"/>
<testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_string="1"/>
</testsuite>
<testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="HasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
</testsuite>
<testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/0" />
</testsuite>
<testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/1" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/0" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/1" />
</testsuite>
</testsuites>""" % {'stack': STACK_TRACE_TEMPLATE}
EXPECTED_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="0" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
</testsuites>"""
GTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)
SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(
[GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output
class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
"""
Unit test for Google Test's XML output functionality.
"""
# This test currently breaks on platforms that do not support typed and
# type-parameterized tests, so we don't run it under them.
if SUPPORTS_TYPED_TESTS:
def testNonEmptyXmlOutput(self):
"""
Runs a test program that generates a non-empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1)
def testEmptyXmlOutput(self):
"""Verifies XML output for a Google Test binary without actual tests.
Runs a test program that generates an empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput('gtest_no_test_unittest', EXPECTED_EMPTY_XML, 0)
def testTimestampValue(self):
"""Checks whether the timestamp attribute in the XML output is valid.
Runs a test program that generates an empty XML output, and checks if
the timestamp attribute in the testsuites tag is valid.
"""
actual = self._GetXmlOutput('gtest_no_test_unittest', 0)
date_time_str = actual.documentElement.getAttributeNode('timestamp').value
# datetime.strptime() is only available in Python 2.5+ so we have to
# parse the expected datetime manually.
match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str)
    self.assertTrue(
        match,
        'XML datetime string %s has incorrect format' % date_time_str)
date_time_from_xml = datetime.datetime(
year=int(match.group(1)), month=int(match.group(2)),
day=int(match.group(3)), hour=int(match.group(4)),
minute=int(match.group(5)), second=int(match.group(6)))
time_delta = abs(datetime.datetime.now() - date_time_from_xml)
# timestamp value should be near the current local time
self.assertTrue(time_delta < datetime.timedelta(seconds=600),
'time_delta is %s' % time_delta)
actual.unlink()
def testDefaultOutputFile(self):
"""
Confirms that Google Test produces an XML output file with the expected
default name if no name is explicitly specified.
"""
output_file = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_DEFAULT_OUTPUT_FILE)
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
'gtest_no_test_unittest')
try:
os.remove(output_file)
except OSError, e:
if e.errno != errno.ENOENT:
raise
p = gtest_test_utils.Subprocess(
[gtest_prog_path, '%s=xml' % GTEST_OUTPUT_FLAG],
working_dir=gtest_test_utils.GetTempDir())
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
self.assert_(os.path.isfile(output_file))
def testSuppressedXmlOutput(self):
"""
Tests that no XML file is generated if the default XML listener is
shut down before RUN_ALL_TESTS is invoked.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_PROGRAM_NAME + 'out.xml')
if os.path.isfile(xml_path):
os.remove(xml_path)
command = [GTEST_PROGRAM_PATH,
'%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path),
'--shut_down_xml']
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
      # p.signal is available only if p.terminated_by_signal is True.
self.assertFalse(
p.terminated_by_signal,
'%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(1, p.exit_code,
"'%s' exited with code %s, which doesn't match "
'the expected exit code %s.'
% (command, p.exit_code, 1))
self.assert_(not os.path.isfile(xml_path))
def _GetXmlOutput(self, gtest_prog_name, expected_exit_code):
"""
Returns the xml output generated by running the program gtest_prog_name.
Furthermore, the program's exit code must be expected_exit_code.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
gtest_prog_name + 'out.xml')
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
command = [gtest_prog_path, '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path)]
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
self.assert_(False,
'%s was killed by signal %d' % (gtest_prog_name, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(expected_exit_code, p.exit_code,
"'%s' exited with code %s, which doesn't match "
'the expected exit code %s.'
% (command, p.exit_code, expected_exit_code))
actual = minidom.parse(xml_path)
return actual
def _TestXmlOutput(self, gtest_prog_name, expected_xml, expected_exit_code):
"""
Asserts that the XML document generated by running the program
gtest_prog_name matches expected_xml, a string containing another
XML document. Furthermore, the program's exit code must be
expected_exit_code.
"""
actual = self._GetXmlOutput(gtest_prog_name, expected_exit_code)
expected = minidom.parseString(expected_xml)
self.NormalizeXml(actual.documentElement)
self.AssertEquivalentNodes(expected.documentElement,
actual.documentElement)
expected.unlink()
actual.unlink()
if __name__ == '__main__':
os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
gtest_test_utils.Main()
| gpl-2.0 | 9,129,682,897,055,209,000 | 5,590,071,688,872,693,000 | 46.623239 | 225 | 0.683623 | false |
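For orientation, a condensed sketch of what _GetXmlOutput() above does: run a gtest binary with --gtest_output pointing at a file, then parse the result. The binary path is a placeholder:

import subprocess
from xml.dom import minidom

xml_path = '/tmp/test_detail.xml'
subprocess.call(['./my_gtest_binary', '--gtest_output=xml:%s' % xml_path])
doc = minidom.parse(xml_path)
suites = doc.documentElement                      # the <testsuites> element
print(suites.getAttribute('tests'), suites.getAttribute('failures'))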
jvanbrug/alanaldavista | boto/ec2/cloudwatch/datapoint.py | 56 | 1668 | # Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from datetime import datetime
class Datapoint(dict):
def __init__(self, connection=None):
dict.__init__(self)
self.connection = connection
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name in ['Average', 'Maximum', 'Minimum', 'Sum', 'SampleCount']:
self[name] = float(value)
elif name == 'Timestamp':
self[name] = datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')
elif name != 'member':
self[name] = value
| mit | 7,298,982,917,943,188,000 | -6,279,233,555,881,212,000 | 40.7 | 75 | 0.705036 | false |
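Datapoint above is filled in by boto's SAX response parser through endElement() calls; a hedged sketch driving that contract by hand, with values shaped like a CloudWatch GetMetricStatistics datapoint:

from boto.ec2.cloudwatch.datapoint import Datapoint

d = Datapoint()
d.endElement('Average', '42.5', None)
d.endElement('Timestamp', '2012-06-01T12:00:00Z', None)
d.endElement('Unit', 'Percent', None)
assert d['Average'] == 42.5        # statistic names are coerced to float
assert d['Timestamp'].hour == 12   # timestamps are parsed to datetime
assert d['Unit'] == 'Percent'      # anything else is stored as-is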
martynovp/edx-platform | common/lib/xmodule/xmodule/videoannotation_module.py | 107 | 6445 | """
Module for Video annotations using annotator.
"""
from lxml import etree
from pkg_resources import resource_string
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
from xblock.core import Scope, String
from xmodule.annotator_mixin import get_instructions, get_extension
from xmodule.annotator_token import retrieve_token
from xblock.fragment import Fragment
import textwrap
# Make '_' a no-op so we can scrape strings
_ = lambda text: text
class AnnotatableFields(object):
""" Fields for `VideoModule` and `VideoDescriptor`. """
data = String(
help=_("XML data for the annotation"),
scope=Scope.content,
default=textwrap.dedent("""\
<annotatable>
<instructions>
<p>
Add the instructions to the assignment here.
</p>
</instructions>
</annotatable>
"""))
display_name = String(
display_name=_("Display Name"),
help=_("Display name for this module"),
scope=Scope.settings,
default=_('Video Annotation'),
)
sourceurl = String(
help=_("The external source URL for the video."),
display_name=_("Source URL"),
scope=Scope.settings, default="http://video-js.zencoder.com/oceans-clip.mp4"
)
poster_url = String(
help=_("Poster Image URL"),
display_name=_("Poster URL"),
scope=Scope.settings,
default=""
)
annotation_storage_url = String(
help=_("Location of Annotation backend"),
scope=Scope.settings,
default="http://your_annotation_storage.com",
display_name=_("Url for Annotation Storage")
)
annotation_token_secret = String(
help=_("Secret string for annotation storage"),
scope=Scope.settings,
default="xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
display_name=_("Secret Token String for Annotation")
)
default_tab = String(
display_name=_("Default Annotations Tab"),
help=_("Select which tab will be the default in the annotations table: myNotes, Instructor, or Public."),
scope=Scope.settings,
default="myNotes",
)
# currently only supports one instructor, will build functionality for multiple later
instructor_email = String(
display_name=_("Email for 'Instructor' Annotations"),
help=_("Email of the user that will be attached to all annotations that will be found in 'Instructor' tab."),
scope=Scope.settings,
default="",
)
annotation_mode = String(
display_name=_("Mode for Annotation Tool"),
help=_("Type in number corresponding to following modes: 'instructor' or 'everyone'"),
scope=Scope.settings,
default="everyone",
)
class VideoAnnotationModule(AnnotatableFields, XModule):
'''Video Annotation Module'''
js = {
'coffee': [
resource_string(__name__, 'js/src/javascript_loader.coffee'),
resource_string(__name__, 'js/src/html/display.coffee'),
resource_string(__name__, 'js/src/annotatable/display.coffee'),
],
'js': [
resource_string(__name__, 'js/src/collapsible.js'),
]
}
css = {'scss': [resource_string(__name__, 'css/annotatable/display.scss')]}
icon_class = 'videoannotation'
def __init__(self, *args, **kwargs):
super(VideoAnnotationModule, self).__init__(*args, **kwargs)
xmltree = etree.fromstring(self.data)
self.instructions = self._extract_instructions(xmltree)
self.content = etree.tostring(xmltree, encoding='unicode')
self.user_email = ""
self.is_course_staff = False
if self.runtime.get_user_role() in ['instructor', 'staff']:
self.is_course_staff = True
if self.runtime.get_real_user is not None:
try:
self.user_email = self.runtime.get_real_user(self.runtime.anonymous_student_id).email
except Exception: # pylint: disable=broad-except
self.user_email = _("No email address found.")
def _extract_instructions(self, xmltree):
""" Removes <instructions> from the xmltree and returns them as a string, otherwise None. """
return get_instructions(xmltree)
def _get_extension(self, src_url):
''' get the extension of a given url '''
return get_extension(src_url)
def student_view(self, context):
""" Renders parameters to template. """
extension = self._get_extension(self.sourceurl)
context = {
'course_key': self.runtime.course_id,
'display_name': self.display_name_with_default,
'instructions_html': self.instructions,
'sourceUrl': self.sourceurl,
'typeSource': extension,
'poster': self.poster_url,
'content_html': self.content,
'token': retrieve_token(self.user_email, self.annotation_token_secret),
'annotation_storage': self.annotation_storage_url,
'default_tab': self.default_tab,
'instructor_email': self.instructor_email,
'annotation_mode': self.annotation_mode,
'is_course_staff': self.is_course_staff,
}
fragment = Fragment(self.system.render_template('videoannotation.html', context))
# TinyMCE already exists in Studio so we should not load the files again
# get_real_user always returns "None" in Studio since its runtimes contains no anonymous ids
if self.runtime.get_real_user is not None:
fragment.add_javascript_url(self.runtime.STATIC_URL + "js/vendor/tinymce/js/tinymce/tinymce.full.min.js")
fragment.add_javascript_url(self.runtime.STATIC_URL + "js/vendor/tinymce/js/tinymce/jquery.tinymce.min.js")
return fragment
class VideoAnnotationDescriptor(AnnotatableFields, RawDescriptor):
''' Video annotation descriptor '''
module_class = VideoAnnotationModule
mako_template = "widgets/raw-edit.html"
@property
def non_editable_metadata_fields(self):
non_editable_fields = super(VideoAnnotationDescriptor, self).non_editable_metadata_fields
non_editable_fields.extend([
VideoAnnotationDescriptor.annotation_storage_url,
VideoAnnotationDescriptor.annotation_token_secret,
])
return non_editable_fields
| agpl-3.0 | 8,494,496,709,310,768,000 | 8,610,355,316,547,714,000 | 38.060606 | 119 | 0.632739 | false |
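The `_ = lambda text: text` line near the top of the module above is the usual XBlock idiom: field defaults and help strings are evaluated at class-definition time, so `_` must only mark the literals for translation scraping, not translate them. A tiny illustration of the idiom (the variable names are ours):

_ = lambda text: text                    # no-op marker for string scraping

DEFAULT_DISPLAY_NAME = _('Video Annotation')
assert DEFAULT_DISPLAY_NAME == 'Video Annotation'  # unchanged at import time;
# the runtime applies the real gettext translation later, at render time.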
jmartiuk5/python-mode | pymode/libs3/rope/refactor/method_object.py | 91 | 3868 | import warnings
from rope.base import pyobjects, exceptions, change, evaluate, codeanalyze
from rope.refactor import sourceutils, occurrences, rename
class MethodObject(object):
def __init__(self, project, resource, offset):
self.pycore = project.pycore
this_pymodule = self.pycore.resource_to_pyobject(resource)
pyname = evaluate.eval_location(this_pymodule, offset)
if pyname is None or not isinstance(pyname.get_object(),
pyobjects.PyFunction):
raise exceptions.RefactoringError(
'Replace method with method object refactoring should be '
'performed on a function.')
self.pyfunction = pyname.get_object()
self.pymodule = self.pyfunction.get_module()
self.resource = self.pymodule.get_resource()
def get_new_class(self, name):
body = sourceutils.fix_indentation(
self._get_body(), sourceutils.get_indent(self.pycore) * 2)
return 'class %s(object):\n\n%s%sdef __call__(self):\n%s' % \
(name, self._get_init(),
' ' * sourceutils.get_indent(self.pycore), body)
def get_changes(self, classname=None, new_class_name=None):
if new_class_name is not None:
warnings.warn(
'new_class_name parameter is deprecated; use classname',
DeprecationWarning, stacklevel=2)
classname = new_class_name
collector = codeanalyze.ChangeCollector(self.pymodule.source_code)
start, end = sourceutils.get_body_region(self.pyfunction)
indents = sourceutils.get_indents(
self.pymodule.lines, self.pyfunction.get_scope().get_start()) + \
sourceutils.get_indent(self.pycore)
new_contents = ' ' * indents + 'return %s(%s)()\n' % \
(classname, ', '.join(self._get_parameter_names()))
collector.add_change(start, end, new_contents)
insertion = self._get_class_insertion_point()
collector.add_change(insertion, insertion,
'\n\n' + self.get_new_class(classname))
changes = change.ChangeSet('Replace method with method object refactoring')
changes.add_change(change.ChangeContents(self.resource,
collector.get_changed()))
return changes
def _get_class_insertion_point(self):
current = self.pyfunction
while current.parent != self.pymodule:
current = current.parent
end = self.pymodule.lines.get_line_end(current.get_scope().get_end())
return min(end + 1, len(self.pymodule.source_code))
def _get_body(self):
body = sourceutils.get_body(self.pyfunction)
for param in self._get_parameter_names():
body = param + ' = None\n' + body
pymod = self.pycore.get_string_module(body, self.resource)
pyname = pymod[param]
finder = occurrences.create_finder(self.pycore, param, pyname)
result = rename.rename_in_module(finder, 'self.' + param,
pymodule=pymod)
body = result[result.index('\n') + 1:]
return body
def _get_init(self):
params = self._get_parameter_names()
indents = ' ' * sourceutils.get_indent(self.pycore)
if not params:
return ''
header = indents + 'def __init__(self'
body = ''
for arg in params:
new_name = arg
if arg == 'self':
new_name = 'host'
header += ', %s' % new_name
body += indents * 2 + 'self.%s = %s\n' % (arg, new_name)
header += '):'
return '%s\n%s\n' % (header, body)
def _get_parameter_names(self):
return self.pyfunction.get_param_names()
| lgpl-3.0 | 370,966,027,996,892,800 | -2,586,657,691,197,658,600 | 43.45977 | 83 | 0.577818 | false |
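To make the refactoring above concrete, a hedged before/after sketch of what get_changes() produces for a two-parameter function; whitespace and naming in rope's real output may differ:

# Before: a plain function.
def greet(name, punct):
    return 'hello ' + name + punct

# After: the body moves into __call__, parameters become attributes set in
# __init__ (cf. _get_init() and get_new_class() above), and the original
# function is reduced to constructing and calling the object.
class Greet(object):
    def __init__(self, name, punct):
        self.name = name
        self.punct = punct
    def __call__(self):
        return 'hello ' + self.name + self.punct

def greet_refactored(name, punct):
    return Greet(name, punct)()

assert greet('ada', '!') == greet_refactored('ada', '!')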
ghostlander/p2pool-neoscrypt | p2pool/util/p2protocol.py | 216 | 4144 | '''
Generic message-based protocol used by Bitcoin and P2Pool for P2P communication
'''
import hashlib
import struct
from twisted.internet import protocol
from twisted.python import log
import p2pool
from p2pool.util import datachunker, variable
class TooLong(Exception):
pass
class Protocol(protocol.Protocol):
def __init__(self, message_prefix, max_payload_length, traffic_happened=variable.Event(), ignore_trailing_payload=False):
self._message_prefix = message_prefix
self._max_payload_length = max_payload_length
self.dataReceived2 = datachunker.DataChunker(self.dataReceiver())
self.traffic_happened = traffic_happened
self.ignore_trailing_payload = ignore_trailing_payload
def dataReceived(self, data):
self.traffic_happened.happened('p2p/in', len(data))
self.dataReceived2(data)
def dataReceiver(self):
while True:
start = ''
while start != self._message_prefix:
start = (start + (yield 1))[-len(self._message_prefix):]
command = (yield 12).rstrip('\0')
length, = struct.unpack('<I', (yield 4))
if length > self._max_payload_length:
print 'length too large'
continue
checksum = yield 4
payload = yield length
if hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4] != checksum:
print 'invalid hash for', self.transport.getPeer().host, repr(command), length, checksum.encode('hex')
if p2pool.DEBUG:
print hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4].encode('hex'), payload.encode('hex')
self.badPeerHappened()
continue
type_ = getattr(self, 'message_' + command, None)
if type_ is None:
if p2pool.DEBUG:
print 'no type for', repr(command)
continue
try:
self.packetReceived(command, type_.unpack(payload, self.ignore_trailing_payload))
except:
print 'RECV', command, payload[:100].encode('hex') + ('...' if len(payload) > 100 else '')
log.err(None, 'Error handling message: (see RECV line)')
self.disconnect()
def packetReceived(self, command, payload2):
handler = getattr(self, 'handle_' + command, None)
if handler is None:
if p2pool.DEBUG:
print 'no handler for', repr(command)
return
if getattr(self, 'connected', True) and not getattr(self, 'disconnecting', False):
handler(**payload2)
def disconnect(self):
if hasattr(self.transport, 'abortConnection'):
# Available since Twisted 11.1
self.transport.abortConnection()
else:
# This doesn't always close timed out connections! warned about in main
self.transport.loseConnection()
def badPeerHappened(self):
self.disconnect()
def sendPacket(self, command, payload2):
if len(command) >= 12:
raise ValueError('command too long')
type_ = getattr(self, 'message_' + command, None)
if type_ is None:
raise ValueError('invalid command')
#print 'SEND', command, repr(payload2)[:500]
payload = type_.pack(payload2)
if len(payload) > self._max_payload_length:
raise TooLong('payload too long')
data = self._message_prefix + struct.pack('<12sI', command, len(payload)) + hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4] + payload
self.traffic_happened.happened('p2p/out', len(data))
self.transport.write(data)
def __getattr__(self, attr):
prefix = 'send_'
if attr.startswith(prefix):
command = attr[len(prefix):]
return lambda **payload2: self.sendPacket(command, payload2)
#return protocol.Protocol.__getattr__(self, attr)
raise AttributeError(attr)
| gpl-3.0 | -3,308,401,975,799,534,000 | 3,175,846,188,472,156,000 | 38.846154 | 155 | 0.587597 | false |
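A standalone sketch of the frame layout that sendPacket() and dataReceiver() above agree on: message prefix, 12-byte null-padded command, little-endian uint32 length, 4-byte double-SHA256 checksum, then the payload. The prefix value here is Bitcoin mainnet's, purely as an example:

import hashlib
import struct

def frame(prefix, command, payload):
    # Checksum is the first 4 bytes of sha256(sha256(payload)), exactly as
    # verified in dataReceiver() above.
    checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]
    return prefix + struct.pack('<12sI', command, len(payload)) + checksum + payload

msg = frame(b'\xf9\xbe\xb4\xd9', b'ping', b'\x00' * 8)
assert len(msg) == 4 + 12 + 4 + 4 + 8   # prefix + command + length + checksum + payload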
eXistenZNL/SickRage | lib/github/Status.py | 74 | 2548 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2013 Vincent Jacques <[email protected]> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
class Status(github.GithubObject.NonCompletableGithubObject):
"""
This class represents status as defined in https://status.github.com/api
"""
@property
def status(self):
"""
:type: string
"""
return self._status.value
@property
def last_updated(self):
"""
:type: datetime.datetime
"""
return self._last_updated.value
def _initAttributes(self):
self._status = github.GithubObject.NotSet
self._last_updated = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "status" in attributes: # pragma no branch
self._status = self._makeStringAttribute(attributes["status"])
if "last_updated" in attributes: # pragma no branch
self._last_updated = self._makeDatetimeAttribute(attributes["last_updated"])
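    # Illustrative usage sketch (not part of the original file): given a
    # Status instance `s` obtained from the PyGithub API wrapper elsewhere,
    # the two properties defined above read like plain attributes:
    #
    #     print s.status        # e.g. "good"
    #     print s.last_updated  # a datetime.datetime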
| gpl-3.0 | 9,009,058,118,668,086,000 | -420,672,870,249,298,000 | 46.185185 | 88 | 0.470173 | false |
dims/glance | glance/tests/unit/test_glare_plugin_loader.py | 1 | 7402 | # Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
import pkg_resources
from glance.common import exception
from glance.common.glare import loader
from glance.contrib.plugins.artifacts_sample.v1 import artifact as art1
from glance.contrib.plugins.artifacts_sample.v2 import artifact as art2
from glance.tests import utils
class MyArtifactDuplicate(art1.MyArtifact):
__type_version__ = '1.0.1'
__type_name__ = 'MyArtifact'
class MyArtifactOk(art1.MyArtifact):
__type_version__ = '1.0.2'
__type_name__ = 'MyArtifact'
class TestArtifactsLoader(utils.BaseTestCase):
def setUp(self):
self.path = 'glance.contrib.plugins.artifacts_sample'
self._setup_loader(['MyArtifact=%s.v1.artifact:MyArtifact' %
self.path])
super(TestArtifactsLoader, self).setUp()
def _setup_loader(self, artifacts):
self.loader = None
mock_this = 'stevedore.extension.ExtensionManager._find_entry_points'
with mock.patch(mock_this) as fep:
fep.return_value = [
pkg_resources.EntryPoint.parse(art) for art in artifacts]
self.loader = loader.ArtifactsPluginLoader(
'glance.artifacts.types')
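    # The strings handed to _setup_loader follow the standard setuptools
    # entry-point form "<name> = <module.path>:<ClassName>", e.g.
    # "MyArtifact=glance.contrib.plugins.artifacts_sample.v1.artifact:MyArtifact";
    # pkg_resources.EntryPoint.parse() above turns each one into the
    # EntryPoint object that stevedore would normally discover on its own.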
def test_load(self):
"""
        Plugins can be loaded as entrypoint=single plugin and
entrypoint=[a, list, of, plugins]
"""
# single version
self.assertEqual(1, len(self.loader.mgr.extensions))
self.assertEqual(art1.MyArtifact,
self.loader.get_class_by_endpoint('myartifact'))
# entrypoint = [a, list]
path = os.path.splitext(__file__)[0][__file__.rfind(
'glance'):].replace('/', '.')
self._setup_loader([
'MyArtifact=%s:MyArtifactOk' % path,
'MyArtifact=%s.v2.artifact:MyArtifact' % self.path,
            'MyArtifact=%s.v1.artifact:MyArtifact' % self.path])
self.assertEqual(3, len(self.loader.mgr.extensions))
# returns the plugin with the latest version
self.assertEqual(art2.MyArtifact,
self.loader.get_class_by_endpoint('myartifact'))
self.assertEqual(art1.MyArtifact,
self.loader.get_class_by_endpoint('myartifact',
'1.0.1'))
def test_basic_loader_func(self):
"""Test public methods of PluginLoader class here"""
# type_version 2 == 2.0 == 2.0.0
self._setup_loader(
['MyArtifact=%s.v2.artifact:MyArtifact' % self.path])
self.assertEqual(art2.MyArtifact,
self.loader.get_class_by_endpoint('myartifact'))
self.assertEqual(art2.MyArtifact,
self.loader.get_class_by_endpoint('myartifact',
'2.0'))
self.assertEqual(art2.MyArtifact,
self.loader.get_class_by_endpoint('myartifact',
'2.0.0'))
self.assertEqual(art2.MyArtifact,
self.loader.get_class_by_endpoint('myartifact',
'2'))
# now make sure that get_class_by_typename works as well
self.assertEqual(art2.MyArtifact,
self.loader.get_class_by_typename('MyArtifact'))
self.assertEqual(art2.MyArtifact,
self.loader.get_class_by_typename('MyArtifact', '2'))
def test_config_validation(self):
"""
        Plugins can be loaded only under certain conditions:
        * entry point name == type_name
        * no plugin with the same type_name and version has already been
          loaded
"""
path = 'glance.contrib.plugins.artifacts_sample'
# here artifacts specific validation is checked
self.assertRaises(exception.ArtifactNonMatchingTypeName,
self._setup_loader,
['non_matching_name=%s.v1.artifact:MyArtifact' %
path])
# make sure this call is ok
self._setup_loader(['MyArtifact=%s.v1.artifact:MyArtifact' % path])
art_type = self.loader.get_class_by_endpoint('myartifact')
self.assertEqual('MyArtifact', art_type.metadata.type_name)
self.assertEqual('1.0.1', art_type.metadata.type_version)
# now try to add duplicate artifact with the same type_name and
# type_version as already exists
bad_art_path = os.path.splitext(__file__)[0][__file__.rfind(
'glance'):].replace('/', '.')
self.assertEqual(art_type.metadata.type_version,
MyArtifactDuplicate.metadata.type_version)
self.assertEqual(art_type.metadata.type_name,
MyArtifactDuplicate.metadata.type_name)
# should raise an exception as (name, version) is not unique
self.assertRaises(
exception.ArtifactDuplicateNameTypeVersion, self._setup_loader,
['MyArtifact=%s.v1.artifact:MyArtifact' % path,
'MyArtifact=%s:MyArtifactDuplicate' % bad_art_path])
# two artifacts with the same name but different versions coexist fine
self.assertEqual('MyArtifact', MyArtifactOk.metadata.type_name)
self.assertNotEqual(art_type.metadata.type_version,
MyArtifactOk.metadata.type_version)
self._setup_loader(['MyArtifact=%s.v1.artifact:MyArtifact' % path,
'MyArtifact=%s:MyArtifactOk' % bad_art_path])
def test_check_function(self):
"""
        A test to show that the plugin-load specific options in artifacts.conf
        are correctly processed:
        * no plugins can be loaded if load_enabled = False
        * if an available_plugins list is given, only the plugins specified
          can be loaded
"""
self.config(load_enabled=False)
self.assertRaises(exception.ArtifactLoadError,
self._setup_loader,
['MyArtifact=%s.v1.artifact:MyArtifact' % self.path])
self.config(load_enabled=True, available_plugins=['MyArtifact-1.0.2'])
self.assertRaises(exception.ArtifactLoadError,
self._setup_loader,
['MyArtifact=%s.v1.artifact:MyArtifact' % self.path])
path = os.path.splitext(__file__)[0][__file__.rfind(
'glance'):].replace('/', '.')
self._setup_loader(['MyArtifact=%s:MyArtifactOk' % path])
# make sure that plugin_map has the expected plugin
self.assertEqual(MyArtifactOk,
self.loader.get_class_by_endpoint('myartifact',
'1.0.2'))
| apache-2.0 | -3,361,660,422,473,117,000 | 754,850,191,654,718,600 | 45.553459 | 79 | 0.592002 | false |
savoirfairelinux/django | tests/redirects_tests/tests.py | 4 | 3823 | from django.conf import settings
from django.contrib.redirects.middleware import RedirectFallbackMiddleware
from django.contrib.redirects.models import Redirect
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpResponseForbidden, HttpResponseRedirect
from django.test import TestCase, modify_settings, override_settings
@modify_settings(MIDDLEWARE={'append': 'django.contrib.redirects.middleware.RedirectFallbackMiddleware'})
@override_settings(APPEND_SLASH=False, ROOT_URLCONF='redirects_tests.urls', SITE_ID=1)
class RedirectTests(TestCase):
def setUp(self):
self.site = Site.objects.get(pk=settings.SITE_ID)
def test_model(self):
r1 = Redirect.objects.create(site=self.site, old_path='/initial', new_path='/new_target')
self.assertEqual(str(r1), "/initial ---> /new_target")
def test_redirect(self):
Redirect.objects.create(site=self.site, old_path='/initial', new_path='/new_target')
response = self.client.get('/initial')
self.assertRedirects(response, '/new_target', status_code=301, target_status_code=404)
@override_settings(APPEND_SLASH=True)
def test_redirect_with_append_slash(self):
Redirect.objects.create(site=self.site, old_path='/initial/', new_path='/new_target/')
response = self.client.get('/initial')
self.assertRedirects(response, '/new_target/', status_code=301, target_status_code=404)
@override_settings(APPEND_SLASH=True)
def test_redirect_with_append_slash_and_query_string(self):
Redirect.objects.create(site=self.site, old_path='/initial/?foo', new_path='/new_target/')
response = self.client.get('/initial?foo')
self.assertRedirects(response, '/new_target/', status_code=301, target_status_code=404)
@override_settings(APPEND_SLASH=True)
def test_redirect_not_found_with_append_slash(self):
"""
Exercise the second Redirect.DoesNotExist branch in
RedirectFallbackMiddleware.
"""
response = self.client.get('/test')
self.assertEqual(response.status_code, 404)
def test_redirect_shortcircuits_non_404_response(self):
"""RedirectFallbackMiddleware short-circuits on non-404 requests."""
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
def test_response_gone(self):
"""When the redirect target is '', return a 410"""
Redirect.objects.create(site=self.site, old_path='/initial', new_path='')
response = self.client.get('/initial')
self.assertEqual(response.status_code, 410)
@modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'})
def test_sites_not_installed(self):
with self.assertRaises(ImproperlyConfigured):
RedirectFallbackMiddleware()
class OverriddenRedirectFallbackMiddleware(RedirectFallbackMiddleware):
# Use HTTP responses different from the defaults
response_gone_class = HttpResponseForbidden
response_redirect_class = HttpResponseRedirect
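# A subclass like the one above is enabled the same way as the stock
# middleware: by listing its dotted path in the MIDDLEWARE setting, which is
# exactly what the modify_settings decorator below does for this test case.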
@modify_settings(MIDDLEWARE={'append': 'redirects_tests.tests.OverriddenRedirectFallbackMiddleware'})
@override_settings(SITE_ID=1)
class OverriddenRedirectMiddlewareTests(TestCase):
def setUp(self):
self.site = Site.objects.get(pk=settings.SITE_ID)
def test_response_gone_class(self):
Redirect.objects.create(site=self.site, old_path='/initial/', new_path='')
response = self.client.get('/initial/')
self.assertEqual(response.status_code, 403)
def test_response_redirect_class(self):
Redirect.objects.create(site=self.site, old_path='/initial/', new_path='/new_target/')
response = self.client.get('/initial/')
self.assertEqual(response.status_code, 302)
| bsd-3-clause | -1,368,935,741,046,520,000 | 1,819,602,954,494,141,400 | 43.976471 | 105 | 0.711745 | false |
aricchen/openHR | openerp/addons/l10n_cn/__openerp__.py | 91 | 1827 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2009 Gábor Dukai
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'China Chart of Accounts - Accounting',
'version': '1.0',
'category': 'Localization/Account Charts',
'author': 'openerp-china.org',
'maintainer':'openerp-china.org',
'website':'http://openerp-china.org',
'url': 'http://code.google.com/p/openerp-china/source/browse/#svn/trunk/l10n_cn',
'description': """
Adds Chinese province data
Account types \ chart of accounts templates \ VAT \ auxiliary accounting categories \ management accounting journals \ financial accounting journals
============================================================
""",
'depends': ['base','account'],
'demo': [],
'data': [
'account_chart.xml',
'l10n_chart_cn_wizard.xml',
'base_data.xml',
],
'license': 'GPL-3',
'auto_install': False,
'installable': True,
'images': ['images/config_chart_l10n_cn.jpeg','images/l10n_cn_chart.jpeg'],
}
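# Note (not part of the original manifest): the keys above follow the
# standard OpenERP module-manifest schema -- 'depends' ensures base/account
# are installed first, and the XML files in 'data' are loaded in the order
# listed when the module is installed.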
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 2,828,404,807,012,311,600 | 5,932,134,035,978,377,000 | 35.765957 | 85 | 0.573495 | false |
thoughtpalette/thoughts.thoughtpalette.com | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/lexers/_phpbuiltins.py | 95 | 122088 | # -*- coding: utf-8 -*-
"""
pygments.lexers._phpbuiltins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file loads the function names and their modules from the
php webpage and generates itself.
Do not alter the MODULES dict by hand!
WARNING: the generation transfers quite much data over your
internet connection. don't run that at home, use
a server ;-)
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
MODULES = {'.NET': ['dotnet_load'],
'APC': ['apc_add',
'apc_bin_dump',
'apc_bin_dumpfile',
'apc_bin_load',
'apc_bin_loadfile',
'apc_cache_info',
'apc_cas',
'apc_clear_cache',
'apc_compile_file',
'apc_dec',
'apc_define_constants',
'apc_delete_file',
'apc_delete',
'apc_exists',
'apc_fetch',
'apc_inc',
'apc_load_constants',
'apc_sma_info',
'apc_store'],
'APD': ['apd_breakpoint',
'apd_callstack',
'apd_clunk',
'apd_continue',
'apd_croak',
'apd_dump_function_table',
'apd_dump_persistent_resources',
'apd_dump_regular_resources',
'apd_echo',
'apd_get_active_symbols',
'apd_set_pprof_trace',
'apd_set_session_trace_socket',
'apd_set_session_trace',
'apd_set_session',
'override_function',
'rename_function'],
'Aliases and deprecated Mysqli': ['mysqli_bind_param',
'mysqli_bind_result',
'mysqli_client_encoding',
'mysqli_connect',
'mysqli_disable_reads_from_master',
'mysqli_disable_rpl_parse',
'mysqli_enable_reads_from_master',
'mysqli_enable_rpl_parse',
'mysqli_escape_string',
'mysqli_execute',
'mysqli_fetch',
'mysqli_get_metadata',
'mysqli_master_query',
'mysqli_param_count',
'mysqli_report',
'mysqli_rpl_parse_enabled',
'mysqli_rpl_probe',
'mysqli_rpl_query_type',
'mysqli_send_long_data',
'mysqli_send_query',
'mysqli_set_opt',
'mysqli_slave_query'],
'Apache': ['apache_child_terminate',
'apache_get_modules',
'apache_get_version',
'apache_getenv',
'apache_lookup_uri',
'apache_note',
'apache_request_headers',
'apache_reset_timeout',
'apache_response_headers',
'apache_setenv',
'getallheaders',
'virtual'],
'Array': ['array_change_key_case',
'array_chunk',
'array_combine',
'array_count_values',
'array_diff_assoc',
'array_diff_key',
'array_diff_uassoc',
'array_diff_ukey',
'array_diff',
'array_fill_keys',
'array_fill',
'array_filter',
'array_flip',
'array_intersect_assoc',
'array_intersect_key',
'array_intersect_uassoc',
'array_intersect_ukey',
'array_intersect',
'array_key_exists',
'array_keys',
'array_map',
'array_merge_recursive',
'array_merge',
'array_multisort',
'array_pad',
'array_pop',
'array_product',
'array_push',
'array_rand',
'array_reduce',
'array_replace_recursive',
'array_replace',
'array_reverse',
'array_search',
'array_shift',
'array_slice',
'array_splice',
'array_sum',
'array_udiff_assoc',
'array_udiff_uassoc',
'array_udiff',
'array_uintersect_assoc',
'array_uintersect_uassoc',
'array_uintersect',
'array_unique',
'array_unshift',
'array_values',
'array_walk_recursive',
'array_walk',
'array',
'arsort',
'asort',
'compact',
'count',
'current',
'each',
'end',
'extract',
'in_array',
'key',
'krsort',
'ksort',
'list',
'natcasesort',
'natsort',
'next',
'pos',
'prev',
'range',
'reset',
'rsort',
'shuffle',
'sizeof',
'sort',
'uasort',
'uksort',
'usort'],
'BBCode': ['bbcode_add_element',
'bbcode_add_smiley',
'bbcode_create',
'bbcode_destroy',
'bbcode_parse',
'bbcode_set_arg_parser',
'bbcode_set_flags'],
'BC Math': ['bcadd',
'bccomp',
'bcdiv',
'bcmod',
'bcmul',
'bcpow',
'bcpowmod',
'bcscale',
'bcsqrt',
'bcsub'],
'Bzip2': ['bzclose',
'bzcompress',
'bzdecompress',
'bzerrno',
'bzerror',
'bzerrstr',
'bzflush',
'bzopen',
'bzread',
'bzwrite'],
'COM': ['com_addref',
'com_create_guid',
'com_event_sink',
'com_get_active_object',
'com_get',
'com_invoke',
'com_isenum',
'com_load_typelib',
'com_load',
'com_message_pump',
'com_print_typeinfo',
'com_propget',
'com_propput',
'com_propset',
'com_release',
'com_set',
'variant_abs',
'variant_add',
'variant_and',
'variant_cast',
'variant_cat',
'variant_cmp',
'variant_date_from_timestamp',
'variant_date_to_timestamp',
'variant_div',
'variant_eqv',
'variant_fix',
'variant_get_type',
'variant_idiv',
'variant_imp',
'variant_int',
'variant_mod',
'variant_mul',
'variant_neg',
'variant_not',
'variant_or',
'variant_pow',
'variant_round',
'variant_set_type',
'variant_set',
'variant_sub',
'variant_xor'],
'CUBRID': ['cubrid_affected_rows',
'cubrid_bind',
'cubrid_close_prepare',
'cubrid_close_request',
'cubrid_col_get',
'cubrid_col_size',
'cubrid_column_names',
'cubrid_column_types',
'cubrid_commit',
'cubrid_connect_with_url',
'cubrid_connect',
'cubrid_current_oid',
'cubrid_disconnect',
'cubrid_drop',
'cubrid_error_code_facility',
'cubrid_error_code',
'cubrid_error_msg',
'cubrid_execute',
'cubrid_fetch',
'cubrid_free_result',
'cubrid_get_charset',
'cubrid_get_class_name',
'cubrid_get_client_info',
'cubrid_get_db_parameter',
'cubrid_get_server_info',
'cubrid_get',
'cubrid_insert_id',
'cubrid_is_instance',
'cubrid_lob_close',
'cubrid_lob_export',
'cubrid_lob_get',
'cubrid_lob_send',
'cubrid_lob_size',
'cubrid_lock_read',
'cubrid_lock_write',
'cubrid_move_cursor',
'cubrid_num_cols',
'cubrid_num_rows',
'cubrid_prepare',
'cubrid_put',
'cubrid_rollback',
'cubrid_schema',
'cubrid_seq_drop',
'cubrid_seq_insert',
'cubrid_seq_put',
'cubrid_set_add',
'cubrid_set_drop',
'cubrid_version'],
'Cairo': ['cairo_create',
'cairo_font_face_get_type',
'cairo_font_face_status',
'cairo_font_options_create',
'cairo_font_options_equal',
'cairo_font_options_get_antialias',
'cairo_font_options_get_hint_metrics',
'cairo_font_options_get_hint_style',
'cairo_font_options_get_subpixel_order',
'cairo_font_options_hash',
'cairo_font_options_merge',
'cairo_font_options_set_antialias',
'cairo_font_options_set_hint_metrics',
'cairo_font_options_set_hint_style',
'cairo_font_options_set_subpixel_order',
'cairo_font_options_status',
'cairo_format_stride_for_width',
'cairo_image_surface_create_for_data',
'cairo_image_surface_create_from_png',
'cairo_image_surface_create',
'cairo_image_surface_get_data',
'cairo_image_surface_get_format',
'cairo_image_surface_get_height',
'cairo_image_surface_get_stride',
'cairo_image_surface_get_width',
'cairo_matrix_create_scale',
'cairo_matrix_create_translate',
'cairo_matrix_invert',
'cairo_matrix_multiply',
'cairo_matrix_rotate',
'cairo_matrix_transform_distance',
'cairo_matrix_transform_point',
'cairo_matrix_translate',
'cairo_pattern_add_color_stop_rgb',
'cairo_pattern_add_color_stop_rgba',
'cairo_pattern_create_for_surface',
'cairo_pattern_create_linear',
'cairo_pattern_create_radial',
'cairo_pattern_create_rgb',
'cairo_pattern_create_rgba',
'cairo_pattern_get_color_stop_count',
'cairo_pattern_get_color_stop_rgba',
'cairo_pattern_get_extend',
'cairo_pattern_get_filter',
'cairo_pattern_get_linear_points',
'cairo_pattern_get_matrix',
'cairo_pattern_get_radial_circles',
'cairo_pattern_get_rgba',
'cairo_pattern_get_surface',
'cairo_pattern_get_type',
'cairo_pattern_set_extend',
'cairo_pattern_set_filter',
'cairo_pattern_set_matrix',
'cairo_pattern_status',
'cairo_pdf_surface_create',
'cairo_pdf_surface_set_size',
'cairo_ps_get_levels',
'cairo_ps_level_to_string',
'cairo_ps_surface_create',
'cairo_ps_surface_dsc_begin_page_setup',
'cairo_ps_surface_dsc_begin_setup',
'cairo_ps_surface_dsc_comment',
'cairo_ps_surface_get_eps',
'cairo_ps_surface_restrict_to_level',
'cairo_ps_surface_set_eps',
'cairo_ps_surface_set_size',
'cairo_scaled_font_create',
'cairo_scaled_font_extents',
'cairo_scaled_font_get_ctm',
'cairo_scaled_font_get_font_face',
'cairo_scaled_font_get_font_matrix',
'cairo_scaled_font_get_font_options',
'cairo_scaled_font_get_scale_matrix',
'cairo_scaled_font_get_type',
'cairo_scaled_font_glyph_extents',
'cairo_scaled_font_status',
'cairo_scaled_font_text_extents',
'cairo_surface_copy_page',
'cairo_surface_create_similar',
'cairo_surface_finish',
'cairo_surface_flush',
'cairo_surface_get_content',
'cairo_surface_get_device_offset',
'cairo_surface_get_font_options',
'cairo_surface_get_type',
'cairo_surface_mark_dirty_rectangle',
'cairo_surface_mark_dirty',
'cairo_surface_set_device_offset',
'cairo_surface_set_fallback_resolution',
'cairo_surface_show_page',
'cairo_surface_status',
'cairo_surface_write_to_png',
'cairo_svg_surface_create',
'cairo_svg_surface_restrict_to_version',
'cairo_svg_version_to_string'],
'Calendar': ['cal_days_in_month',
'cal_from_jd',
'cal_info',
'cal_to_jd',
'easter_date',
'easter_days',
'FrenchToJD',
'GregorianToJD',
'JDDayOfWeek',
'JDMonthName',
'JDToFrench',
'JDToGregorian',
'jdtojewish',
'JDToJulian',
'jdtounix',
'JewishToJD',
'JulianToJD',
'unixtojd'],
'Classes/Object': ['call_user_method_array',
'call_user_method',
'class_alias',
'class_exists',
'get_called_class',
'get_class_methods',
'get_class_vars',
'get_class',
'get_declared_classes',
'get_declared_interfaces',
'get_object_vars',
'get_parent_class',
'interface_exists',
'is_a',
'is_subclass_of',
'method_exists',
'property_exists'],
'Classkit': ['classkit_import',
'classkit_method_add',
'classkit_method_copy',
'classkit_method_redefine',
'classkit_method_remove',
'classkit_method_rename'],
'Crack': ['crack_check',
'crack_closedict',
'crack_getlastmessage',
'crack_opendict'],
'Ctype': ['ctype_alnum',
'ctype_alpha',
'ctype_cntrl',
'ctype_digit',
'ctype_graph',
'ctype_lower',
'ctype_print',
'ctype_punct'],
'Cyrus': ['cyrus_authenticate',
'cyrus_bind',
'cyrus_close',
'cyrus_connect',
'cyrus_query',
'cyrus_unbind'],
'DB++': ['dbplus_add',
'dbplus_aql',
'dbplus_chdir',
'dbplus_close',
'dbplus_curr',
'dbplus_errcode',
'dbplus_errno',
'dbplus_find',
'dbplus_first',
'dbplus_flush',
'dbplus_freealllocks',
'dbplus_freelock',
'dbplus_freerlocks',
'dbplus_getlock',
'dbplus_getunique',
'dbplus_info',
'dbplus_last',
'dbplus_lockrel',
'dbplus_next',
'dbplus_open',
'dbplus_prev',
'dbplus_rchperm',
'dbplus_rcreate',
'dbplus_rcrtexact',
'dbplus_rcrtlike',
'dbplus_resolve',
'dbplus_restorepos',
'dbplus_rkeys',
'dbplus_ropen',
'dbplus_rquery',
'dbplus_rrename',
'dbplus_rsecindex',
'dbplus_runlink',
'dbplus_rzap',
'dbplus_savepos',
'dbplus_setindex',
'dbplus_setindexbynumber',
'dbplus_sql',
'dbplus_tcl',
'dbplus_tremove',
'dbplus_undo',
'dbplus_undoprepare',
'dbplus_unlockrel',
'dbplus_unselect',
'dbplus_update',
'dbplus_xlockrel',
'dbplus_xunlockrel'],
'DBA': ['dba_close',
'dba_delete',
'dba_exists',
'dba_fetch',
'dba_firstkey',
'dba_handlers',
'dba_insert',
'dba_key_split',
'dba_list',
'dba_nextkey',
'dba_open',
'dba_optimize',
'dba_popen',
'dba_replace',
'dba_sync'],
'DOM': ['dom_import_simplexml'],
'DOM XML (PHP 4)': ['domxml_new_doc',
'domxml_open_file',
'domxml_open_mem',
'domxml_version',
'domxml_xmltree',
'domxml_xslt_stylesheet_doc',
'domxml_xslt_stylesheet_file',
'domxml_xslt_stylesheet',
'domxml_xslt_version',
'xpath_eval_expression',
'xpath_eval',
'xpath_new_context',
'xpath_register_ns_auto',
'xpath_register_ns',
'xptr_eval',
'xptr_new_context'],
'Date/Time': ['checkdate',
'date_add',
'date_create_from_format',
'date_create',
'date_date_set',
'date_default_timezone_get',
'date_default_timezone_set',
'date_diff',
'date_format',
'date_get_last_errors',
'date_interval_create_from_date_string',
'date_interval_format',
'date_isodate_set',
'date_modify',
'date_offset_get',
'date_parse_from_format',
'date_parse',
'date_sub',
'date_sun_info',
'date_sunrise',
'date_sunset',
'date_time_set',
'date_timestamp_get',
'date_timestamp_set',
'date_timezone_get',
'date_timezone_set',
'date',
'getdate',
'gettimeofday',
'gmdate',
'gmmktime',
'gmstrftime',
'idate',
'localtime',
'microtime',
'mktime',
'strftime',
'strptime',
'strtotime',
'time',
'timezone_abbreviations_list',
'timezone_identifiers_list',
'timezone_location_get',
'timezone_name_from_abbr',
'timezone_name_get',
'timezone_offset_get',
'timezone_open',
'timezone_transitions_get',
'timezone_version_get'],
'Direct IO': ['dio_close', 'dio_fcntl', 'dio_open'],
'Directory': ['chdir',
'chroot',
'closedir',
'getcwd',
'opendir',
'readdir',
'rewinddir',
'scandir'],
'Enchant': ['enchant_broker_describe',
'enchant_broker_dict_exists',
'enchant_broker_free_dict',
'enchant_broker_free',
'enchant_broker_get_error',
'enchant_broker_init',
'enchant_broker_list_dicts',
'enchant_broker_request_dict',
'enchant_broker_request_pwl_dict',
'enchant_broker_set_ordering',
'enchant_dict_add_to_personal',
'enchant_dict_add_to_session',
'enchant_dict_check',
'enchant_dict_describe',
'enchant_dict_get_error',
'enchant_dict_is_in_session',
'enchant_dict_quick_check',
'enchant_dict_store_replacement',
'enchant_dict_suggest'],
'Error Handling': ['debug_backtrace',
'debug_print_backtrace',
'error_get_last',
'error_log',
'error_reporting',
'restore_error_handler',
'restore_exception_handler',
'set_error_handler',
'set_exception_handler',
'trigger_error',
'user_error'],
'Exif': ['exif_imagetype',
'exif_read_data',
'exif_tagname',
'exif_thumbnail',
'read_exif_data'],
'Expect': ['expect_expectl'],
'FAM': ['fam_cancel_monitor',
'fam_close',
'fam_monitor_collection',
'fam_monitor_directory',
'fam_monitor_file',
'fam_next_event',
'fam_open',
'fam_pending',
'fam_resume_monitor',
'fam_suspend_monitor'],
'FDF': ['fdf_add_doc_javascript',
'fdf_add_template',
'fdf_close',
'fdf_create',
'fdf_enum_values',
'fdf_errno',
'fdf_error',
'fdf_get_ap',
'fdf_get_attachment',
'fdf_get_encoding',
'fdf_get_file',
'fdf_get_flags',
'fdf_get_opt',
'fdf_get_status',
'fdf_get_value',
'fdf_get_version',
'fdf_header',
'fdf_next_field_name',
'fdf_open_string',
'fdf_open',
'fdf_remove_item',
'fdf_save_string',
'fdf_save',
'fdf_set_ap',
'fdf_set_encoding',
'fdf_set_file',
'fdf_set_flags',
'fdf_set_javascript_action',
'fdf_set_on_import_javascript',
'fdf_set_opt',
'fdf_set_status',
'fdf_set_submit_form_action',
'fdf_set_target_frame',
'fdf_set_value',
'fdf_set_version'],
'FTP': ['ftp_alloc',
'ftp_cdup',
'ftp_chdir',
'ftp_chmod',
'ftp_close',
'ftp_connect',
'ftp_delete',
'ftp_exec',
'ftp_fget',
'ftp_fput',
'ftp_get_option',
'ftp_get',
'ftp_login',
'ftp_mdtm',
'ftp_mkdir',
'ftp_nb_continue',
'ftp_nb_fget',
'ftp_nb_fput',
'ftp_nb_get',
'ftp_nb_put',
'ftp_nlist',
'ftp_pasv',
'ftp_put',
'ftp_pwd',
'ftp_quit',
'ftp_raw',
'ftp_rawlist',
'ftp_rename',
'ftp_rmdir',
'ftp_set_option',
'ftp_site',
'ftp_size',
'ftp_ssl_connect',
'ftp_systype'],
'Fileinfo': ['finfo_buffer',
'finfo_close',
'finfo_file',
'finfo_open',
'finfo_set_flags',
'mime_content_type'],
'Filesystem': ['basename',
'chgrp',
'chmod',
'chown',
'clearstatcache',
'copy',
'dirname',
'disk_free_space',
'disk_total_space',
'diskfreespace',
'fclose',
'feof',
'fflush',
'fgetc',
'fgetcsv',
'fgets',
'fgetss',
'file_exists',
'file_get_contents',
'file_put_contents',
'file',
'fileatime',
'filectime',
'filegroup',
'fileinode',
'filemtime',
'fileowner',
'fileperms',
'filesize',
'filetype',
'flock',
'fnmatch',
'fopen',
'fpassthru',
'fputcsv',
'fputs',
'fread',
'fscanf',
'fseek',
'fstat',
'ftell',
'ftruncate',
'fwrite',
'glob',
'is_dir',
'is_executable',
'is_file',
'is_link',
'is_readable',
'is_uploaded_file',
'is_writable',
'is_writeable',
'lchgrp',
'lchown',
'link',
'linkinfo',
'lstat',
'mkdir',
'move_uploaded_file',
'parse_ini_file',
'parse_ini_string',
'pathinfo',
'pclose',
'popen',
'readfile',
'readlink',
'realpath_cache_get',
'realpath_cache_size',
'realpath',
'rename',
'rewind',
'rmdir',
'set_file_buffer',
'stat',
'symlink',
'tempnam',
'tmpfile',
'touch',
'umask',
'unlink'],
'Filter': ['filter_has_var',
'filter_id',
'filter_input_array',
'filter_input',
'filter_list',
'filter_var_array',
'filter_var'],
'Firebird/InterBase': ['ibase_add_user',
'ibase_affected_rows',
'ibase_backup',
'ibase_blob_add',
'ibase_blob_cancel',
'ibase_blob_close',
'ibase_blob_create',
'ibase_blob_echo',
'ibase_blob_get',
'ibase_blob_import',
'ibase_blob_info',
'ibase_blob_open',
'ibase_close',
'ibase_commit_ret',
'ibase_commit',
'ibase_connect',
'ibase_db_info',
'ibase_delete_user',
'ibase_drop_db',
'ibase_errcode',
'ibase_errmsg',
'ibase_execute',
'ibase_fetch_assoc',
'ibase_fetch_object',
'ibase_fetch_row',
'ibase_field_info',
'ibase_free_event_handler',
'ibase_free_query',
'ibase_free_result',
'ibase_gen_id',
'ibase_maintain_db',
'ibase_modify_user',
'ibase_name_result',
'ibase_num_fields',
'ibase_num_params',
'ibase_param_info',
'ibase_pconnect',
'ibase_prepare',
'ibase_query',
'ibase_restore',
'ibase_rollback_ret',
'ibase_rollback',
'ibase_server_info',
'ibase_service_attach',
'ibase_service_detach',
'ibase_set_event_handler',
'ibase_timefmt',
'ibase_trans',
'ibase_wait_event'],
'FriBiDi': ['fribidi_log2vis'],
'FrontBase': ['fbsql_affected_rows',
'fbsql_autocommit',
'fbsql_blob_size',
'fbsql_change_user',
'fbsql_clob_size',
'fbsql_close',
'fbsql_commit',
'fbsql_connect',
'fbsql_create_blob',
'fbsql_create_clob',
'fbsql_create_db',
'fbsql_data_seek',
'fbsql_database_password',
'fbsql_database',
'fbsql_db_query',
'fbsql_db_status',
'fbsql_drop_db',
'fbsql_errno',
'fbsql_error',
'fbsql_fetch_array',
'fbsql_fetch_assoc',
'fbsql_fetch_field',
'fbsql_fetch_lengths',
'fbsql_fetch_object',
'fbsql_fetch_row',
'fbsql_field_flags',
'fbsql_field_len',
'fbsql_field_name',
'fbsql_field_seek',
'fbsql_field_table',
'fbsql_field_type',
'fbsql_free_result',
'fbsql_get_autostart_info',
'fbsql_hostname',
'fbsql_insert_id',
'fbsql_list_dbs',
'fbsql_list_fields',
'fbsql_list_tables',
'fbsql_next_result',
'fbsql_num_fields',
'fbsql_num_rows',
'fbsql_password',
'fbsql_pconnect',
'fbsql_query',
'fbsql_read_blob',
'fbsql_read_clob',
'fbsql_result',
'fbsql_rollback',
'fbsql_rows_fetched',
'fbsql_select_db',
'fbsql_set_characterset',
'fbsql_set_lob_mode',
'fbsql_set_password',
'fbsql_set_transaction',
'fbsql_start_db',
'fbsql_stop_db',
'fbsql_table_name',
'fbsql_tablename',
'fbsql_username',
'fbsql_warnings'],
'Function handling': ['call_user_func_array',
'call_user_func',
'create_function',
'forward_static_call_array',
'forward_static_call',
'func_get_arg',
'func_get_args',
'func_num_args',
'function_exists',
'get_defined_functions',
'register_shutdown_function',
'register_tick_function',
'unregister_tick_function'],
'GD and Image': ['gd_info',
'getimagesize',
'image_type_to_extension',
'image_type_to_mime_type'],
'GMP': ['gmp_abs',
'gmp_add',
'gmp_and',
'gmp_clrbit',
'gmp_cmp',
'gmp_com',
'gmp_div_q',
'gmp_div_qr',
'gmp_div_r',
'gmp_div',
'gmp_divexact',
'gmp_fact',
'gmp_gcd',
'gmp_gcdext',
'gmp_hamdist',
'gmp_init',
'gmp_intval',
'gmp_invert',
'gmp_jacobi',
'gmp_legendre',
'gmp_mod',
'gmp_mul',
'gmp_neg',
'gmp_nextprime',
'gmp_or',
'gmp_perfect_square',
'gmp_popcount',
'gmp_pow',
'gmp_powm',
'gmp_prob_prime',
'gmp_random',
'gmp_scan0',
'gmp_scan1',
'gmp_setbit',
'gmp_sign',
'gmp_sqrt',
'gmp_sqrtrem',
'gmp_strval',
'gmp_sub',
'gmp_testbit',
'gmp_xor'],
'GeoIP': ['geoip_continent_code_by_name',
'geoip_country_code_by_name',
'geoip_country_code3_by_name',
'geoip_country_name_by_name',
'geoip_database_info',
'geoip_db_avail',
'geoip_db_filename',
'geoip_db_get_all_info',
'geoip_id_by_name',
'geoip_isp_by_name',
'geoip_org_by_name',
'geoip_record_by_name',
'geoip_region_by_name',
'geoip_region_name_by_code',
'geoip_time_zone_by_country_and_region'],
'Gettext': ['bind_textdomain_codeset',
'bindtextdomain',
'dcgettext',
'dcngettext',
'dgettext',
'dngettext',
'gettext',
'ngettext',
'textdomain'],
'GnuPG': ['gnupg_adddecryptkey',
'gnupg_addencryptkey',
'gnupg_addsignkey',
'gnupg_cleardecryptkeys',
'gnupg_clearencryptkeys',
'gnupg_clearsignkeys',
'gnupg_decrypt',
'gnupg_decryptverify',
'gnupg_encrypt',
'gnupg_encryptsign',
'gnupg_export',
'gnupg_geterror',
'gnupg_getprotocol',
'gnupg_import',
'gnupg_init',
'gnupg_keyinfo',
'gnupg_setarmor',
'gnupg_seterrormode',
'gnupg_setsignmode',
'gnupg_sign',
'gnupg_verify'],
'Gopher': ['gopher_parsedir'],
'Grapheme': ['grapheme_extract',
'grapheme_stripos',
'grapheme_stristr',
'grapheme_strlen',
'grapheme_strpos',
'grapheme_strripos',
'grapheme_strrpos',
'grapheme_strstr',
'grapheme_substr'],
'Gupnp': ['gupnp_context_get_host_ip',
'gupnp_context_get_port',
'gupnp_context_get_subscription_timeout',
'gupnp_context_host_path',
'gupnp_context_new',
'gupnp_context_set_subscription_timeout',
'gupnp_context_timeout_add',
'gupnp_context_unhost_path',
'gupnp_control_point_browse_start',
'gupnp_control_point_browse_stop',
'gupnp_control_point_callback_set',
'gupnp_control_point_new',
'gupnp_device_action_callback_set',
'gupnp_device_info_get_service',
'gupnp_device_info_get',
'gupnp_root_device_get_available',
'gupnp_root_device_get_relative_location',
'gupnp_root_device_new',
'gupnp_root_device_set_available',
'gupnp_root_device_start',
'gupnp_root_device_stop',
'gupnp_service_action_get',
'gupnp_service_action_return_error',
'gupnp_service_action_return',
'gupnp_service_action_set',
'gupnp_service_freeze_notify',
'gupnp_service_info_get_introspection',
'gupnp_service_info_get',
'gupnp_service_introspection_get_state_variable',
'gupnp_service_notify',
'gupnp_service_proxy_action_get',
'gupnp_service_proxy_action_set',
'gupnp_service_proxy_add_notify',
'gupnp_service_proxy_callback_set',
'gupnp_service_proxy_get_subscribed',
'gupnp_service_proxy_remove_notify',
'gupnp_service_proxy_set_subscribed',
'gupnp_service_thaw_notify'],
'HTTP': ['http_cache_etag',
'http_cache_last_modified',
'http_chunked_decode',
'http_deflate',
'http_inflate',
'http_build_cookie',
'http_date',
'http_get_request_body_stream',
'http_get_request_body',
'http_get_request_headers',
'http_match_etag',
'http_match_modified',
'http_match_request_header',
'http_support',
'http_negotiate_charset',
'http_negotiate_content_type',
'http_negotiate_language',
'ob_deflatehandler',
'ob_etaghandler',
'ob_inflatehandler',
'http_parse_cookie',
'http_parse_headers',
'http_parse_message',
'http_parse_params',
'http_persistent_handles_clean',
'http_persistent_handles_count',
'http_persistent_handles_ident',
'http_get',
'http_head',
'http_post_data',
'http_post_fields',
'http_put_data',
'http_put_file',
'http_put_stream',
'http_request_body_encode',
'http_request_method_exists',
'http_request_method_name',
'http_request_method_register',
'http_request_method_unregister',
'http_request',
'http_redirect',
'http_send_content_disposition',
'http_send_content_type',
'http_send_data',
'http_send_file',
'http_send_last_modified',
'http_send_status',
'http_send_stream',
'http_throttle',
'http_build_str',
'http_build_url'],
'Hash': ['hash_algos',
'hash_copy',
'hash_file',
'hash_final',
'hash_hmac_file',
'hash_hmac',
'hash_init',
'hash_update_file',
'hash_update_stream',
'hash_update',
'hash'],
'Hyperwave': ['hw_Array2Objrec',
'hw_changeobject',
'hw_Children',
'hw_ChildrenObj',
'hw_Close',
'hw_Connect',
'hw_connection_info',
'hw_cp',
'hw_Deleteobject',
'hw_DocByAnchor',
'hw_DocByAnchorObj',
'hw_Document_Attributes',
'hw_Document_BodyTag',
'hw_Document_Content',
'hw_Document_SetContent',
'hw_Document_Size',
'hw_dummy',
'hw_EditText',
'hw_Error',
'hw_ErrorMsg',
'hw_Free_Document',
'hw_GetAnchors',
'hw_GetAnchorsObj',
'hw_GetAndLock',
'hw_GetChildColl',
'hw_GetChildCollObj',
'hw_GetChildDocColl',
'hw_GetChildDocCollObj',
'hw_GetObject',
'hw_GetObjectByQuery',
'hw_GetObjectByQueryColl',
'hw_GetObjectByQueryCollObj',
'hw_GetObjectByQueryObj',
'hw_GetParents',
'hw_GetParentsObj',
'hw_getrellink',
'hw_GetRemote',
'hw_getremotechildren',
'hw_GetSrcByDestObj',
'hw_GetText',
'hw_getusername',
'hw_Identify',
'hw_InCollections',
'hw_Info',
'hw_InsColl',
'hw_InsDoc',
'hw_insertanchors',
'hw_InsertDocument',
'hw_InsertObject',
'hw_mapid',
'hw_Modifyobject',
'hw_mv',
'hw_New_Document',
'hw_objrec2array',
'hw_Output_Document',
'hw_pConnect',
'hw_PipeDocument',
'hw_Root',
'hw_setlinkroot',
'hw_stat',
'hw_Unlock',
'hw_Who'],
'Hyperwave API': ['hw_api_attribute',
'hwapi_hgcsp',
'hw_api_content',
'hw_api_object'],
'IBM DB2': ['db2_autocommit',
'db2_bind_param',
'db2_client_info',
'db2_close',
'db2_column_privileges',
'db2_columns',
'db2_commit',
'db2_conn_error',
'db2_conn_errormsg',
'db2_connect',
'db2_cursor_type',
'db2_escape_string',
'db2_exec',
'db2_execute',
'db2_fetch_array',
'db2_fetch_assoc',
'db2_fetch_both',
'db2_fetch_object',
'db2_fetch_row',
'db2_field_display_size',
'db2_field_name',
'db2_field_num',
'db2_field_precision',
'db2_field_scale',
'db2_field_type',
'db2_field_width',
'db2_foreign_keys',
'db2_free_result',
'db2_free_stmt',
'db2_get_option',
'db2_last_insert_id'],
'ID3': ['id3_get_frame_long_name',
'id3_get_frame_short_name',
'id3_get_genre_id',
'id3_get_genre_list',
'id3_get_genre_name',
'id3_get_tag',
'id3_get_version',
'id3_remove_tag',
'id3_set_tag'],
'IDN': ['idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'],
'IIS': ['iis_add_server',
'iis_get_dir_security',
'iis_get_script_map',
'iis_get_server_by_comment',
'iis_get_server_by_path',
'iis_get_server_rights',
'iis_get_service_state',
'iis_remove_server',
'iis_set_app_settings',
'iis_set_dir_security',
'iis_set_script_map',
'iis_set_server_rights',
'iis_start_server',
'iis_start_service',
'iis_stop_server',
'iis_stop_service'],
'IMAP': ['imap_8bit',
'imap_alerts',
'imap_append',
'imap_base64',
'imap_binary',
'imap_body',
'imap_bodystruct',
'imap_check',
'imap_clearflag_full',
'imap_close',
'imap_createmailbox',
'imap_delete',
'imap_deletemailbox',
'imap_errors',
'imap_expunge',
'imap_fetch_overview',
'imap_fetchbody',
'imap_fetchheader',
'imap_fetchmime',
'imap_fetchstructure',
'imap_gc',
'imap_get_quota',
'imap_get_quotaroot',
'imap_getacl',
'imap_getmailboxes',
'imap_getsubscribed',
'imap_header',
'imap_headerinfo',
'imap_headers',
'imap_last_error',
'imap_list',
'imap_listmailbox',
'imap_listscan',
'imap_listsubscribed',
'imap_lsub',
'imap_mail_compose',
'imap_mail_copy',
'imap_mail_move',
'imap_mail',
'imap_mailboxmsginfo',
'imap_mime_header_decode',
'imap_msgno',
'imap_num_msg',
'imap_num_recent',
'imap_open',
'imap_ping',
'imap_qprint',
'imap_renamemailbox',
'imap_reopen',
'imap_rfc822_parse_adrlist',
'imap_rfc822_parse_headers',
'imap_rfc822_write_address',
'imap_savebody',
'imap_scanmailbox',
'imap_search',
'imap_set_quota',
'imap_setacl',
'imap_setflag_full',
'imap_sort',
'imap_status',
'imap_subscribe',
'imap_thread',
'imap_timeout',
'imap_uid',
'imap_undelete',
'imap_unsubscribe',
'imap_utf7_decode',
'imap_utf7_encode',
'imap_utf8'],
'Informix': ['ifx_affected_rows',
'ifx_blobinfile_mode',
'ifx_byteasvarchar',
'ifx_close',
'ifx_connect',
'ifx_copy_blob',
'ifx_create_blob',
'ifx_create_char',
'ifx_do',
'ifx_error',
'ifx_errormsg',
'ifx_fetch_row',
'ifx_fieldproperties',
'ifx_fieldtypes',
'ifx_free_blob',
'ifx_free_char',
'ifx_free_result',
'ifx_get_blob',
'ifx_get_char',
'ifx_getsqlca',
'ifx_htmltbl_result',
'ifx_nullformat',
'ifx_num_fields',
'ifx_num_rows',
'ifx_pconnect',
'ifx_prepare',
'ifx_query',
'ifx_textasvarchar',
'ifx_update_blob',
'ifx_update_char',
'ifxus_close_slob',
'ifxus_create_slob',
'ifxus_free_slob',
'ifxus_open_slob',
'ifxus_read_slob',
'ifxus_seek_slob',
'ifxus_tell_slob',
'ifxus_write_slob'],
'Ingres': ['ingres_autocommit_state',
'ingres_autocommit',
'ingres_charset',
'ingres_close',
'ingres_commit',
'ingres_connect',
'ingres_cursor',
'ingres_errno',
'ingres_error',
'ingres_errsqlstate',
'ingres_escape_string',
'ingres_execute',
'ingres_fetch_array',
'ingres_fetch_assoc',
'ingres_fetch_object',
'ingres_fetch_proc_return',
'ingres_fetch_row',
'ingres_field_length',
'ingres_field_name',
'ingres_field_nullable',
'ingres_field_precision',
'ingres_field_scale',
'ingres_field_type',
'ingres_free_result',
'ingres_next_error',
'ingres_num_fields',
'ingres_num_rows',
'ingres_pconnect',
'ingres_prepare',
'ingres_query',
'ingres_result_seek',
'ingres_rollback',
'ingres_set_environment',
'ingres_unbuffered_query'],
'Inotify': ['inotify_add_watch',
'inotify_init',
'inotify_queue_len',
'inotify_read',
'inotify_rm_watch'],
'JSON': ['json_decode', 'json_encode', 'json_last_error'],
'Java': ['java_last_exception_clear', 'java_last_exception_get'],
'Judy': ['judy_type', 'judy_version'],
'KADM5': ['kadm5_chpass_principal',
'kadm5_create_principal',
'kadm5_delete_principal',
'kadm5_destroy',
'kadm5_flush',
'kadm5_get_policies',
'kadm5_get_principal',
'kadm5_get_principals',
'kadm5_init_with_password',
'kadm5_modify_principal'],
'LDAP': ['ldap_8859_to_t61',
'ldap_add',
'ldap_bind',
'ldap_close',
'ldap_compare',
'ldap_connect',
'ldap_count_entries',
'ldap_delete',
'ldap_dn2ufn',
'ldap_err2str',
'ldap_errno',
'ldap_error',
'ldap_explode_dn',
'ldap_first_attribute',
'ldap_first_entry',
'ldap_first_reference',
'ldap_free_result',
'ldap_get_attributes',
'ldap_get_dn',
'ldap_get_entries',
'ldap_get_option',
'ldap_get_values_len',
'ldap_get_values',
'ldap_list',
'ldap_mod_add',
'ldap_mod_del',
'ldap_mod_replace',
'ldap_modify',
'ldap_next_attribute',
'ldap_next_entry',
'ldap_next_reference',
'ldap_parse_reference',
'ldap_parse_result',
'ldap_read',
'ldap_rename',
'ldap_sasl_bind',
'ldap_search',
'ldap_set_option',
'ldap_set_rebind_proc',
'ldap_sort',
'ldap_start_tls',
'ldap_t61_to_8859',
'ldap_unbind'],
'LZF': ['lzf_compress', 'lzf_decompress', 'lzf_optimized_for'],
'Libevent': ['event_add',
'event_base_free',
'event_base_loop',
'event_base_loopbreak',
'event_base_loopexit',
'event_base_new',
'event_base_priority_init',
'event_base_set',
'event_buffer_base_set',
'event_buffer_disable',
'event_buffer_enable',
'event_buffer_fd_set',
'event_buffer_free',
'event_buffer_new',
'event_buffer_priority_set',
'event_buffer_read',
'event_buffer_set_callback',
'event_buffer_timeout_set',
'event_buffer_watermark_set',
'event_buffer_write',
'event_del',
'event_free',
'event_new',
'event_set'],
'Lotus Notes': ['notes_body',
'notes_copy_db',
'notes_create_db',
'notes_create_note',
'notes_drop_db',
'notes_find_note',
'notes_header_info',
'notes_list_msgs',
'notes_mark_read',
'notes_mark_unread',
'notes_nav_create',
'notes_search',
'notes_unread',
'notes_version'],
'MCVE': ['m_checkstatus',
'm_completeauthorizations',
'm_connect',
'm_connectionerror',
'm_deletetrans',
'm_destroyconn',
'm_destroyengine',
'm_getcell',
'm_getcellbynum',
'm_getcommadelimited',
'm_getheader',
'm_initconn',
'm_initengine',
'm_iscommadelimited',
'm_maxconntimeout',
'm_monitor',
'm_numcolumns',
'm_numrows',
'm_parsecommadelimited',
'm_responsekeys'],
'Mail': ['ezmlm_hash', 'mail'],
'Mailparse': ['mailparse_determine_best_xfer_encoding',
'mailparse_msg_create',
'mailparse_msg_extract_part_file',
'mailparse_msg_extract_part',
'mailparse_msg_extract_whole_part_file',
'mailparse_msg_free',
'mailparse_msg_get_part_data',
'mailparse_msg_get_part',
'mailparse_msg_get_structure',
'mailparse_msg_parse_file',
'mailparse_msg_parse',
'mailparse_rfc822_parse_addresses',
'mailparse_stream_encode',
'mailparse_uudecode_all'],
'Math': ['abs',
'acos',
'acosh',
'asin',
'asinh',
'atan2',
'atan',
'atanh',
'base_convert',
'bindec',
'ceil',
'cos',
'cosh',
'decbin',
'dechex',
'decoct',
'deg2rad',
'exp',
'expm1'],
'MaxDB': ['maxdb_affected_rows',
'maxdb_autocommit',
'maxdb_bind_param',
'maxdb_bind_result',
'maxdb_change_user',
'maxdb_character_set_name',
'maxdb_client_encoding',
'maxdb_close_long_data',
'maxdb_close',
'maxdb_commit',
'maxdb_connect_errno',
'maxdb_connect_error',
'maxdb_connect',
'maxdb_data_seek',
'maxdb_debug',
'maxdb_disable_reads_from_master',
'maxdb_disable_rpl_parse',
'maxdb_dump_debug_info',
'maxdb_embedded_connect',
'maxdb_enable_reads_from_master',
'maxdb_enable_rpl_parse',
'maxdb_errno',
'maxdb_error',
'maxdb_escape_string',
'maxdb_execute',
'maxdb_fetch_array',
'maxdb_fetch_assoc',
'maxdb_fetch_field_direct',
'maxdb_fetch_field',
'maxdb_fetch_fields',
'maxdb_fetch_lengths',
'maxdb_fetch_object',
'maxdb_fetch_row',
'maxdb_fetch',
'maxdb_field_count',
'maxdb_field_seek',
'maxdb_field_tell',
'maxdb_free_result',
'maxdb_get_client_info',
'maxdb_get_client_version',
'maxdb_get_host_info',
'maxdb_get_metadata',
'maxdb_get_proto_info',
'maxdb_get_server_info',
'maxdb_get_server_version',
'maxdb_info',
'maxdb_init',
'maxdb_insert_id',
'maxdb_kill',
'maxdb_master_query',
'maxdb_more_results',
'maxdb_multi_query',
'maxdb_next_result',
'maxdb_num_fields',
'maxdb_num_rows',
'maxdb_options',
'maxdb_param_count',
'maxdb_ping',
'maxdb_prepare',
'maxdb_query',
'maxdb_real_connect',
'maxdb_real_escape_string',
'maxdb_real_query',
'maxdb_report',
'maxdb_rollback',
'maxdb_rpl_parse_enabled',
'maxdb_rpl_probe',
'maxdb_rpl_query_type',
'maxdb_select_db',
'maxdb_send_long_data',
'maxdb_send_query',
'maxdb_server_end',
'maxdb_server_init',
'maxdb_set_opt',
'maxdb_sqlstate',
'maxdb_ssl_set',
'maxdb_stat',
'maxdb_stmt_affected_rows'],
'Mcrypt': ['mcrypt_cbc',
'mcrypt_cfb',
'mcrypt_create_iv',
'mcrypt_decrypt',
'mcrypt_ecb',
'mcrypt_enc_get_algorithms_name',
'mcrypt_enc_get_block_size',
'mcrypt_enc_get_iv_size',
'mcrypt_enc_get_key_size',
'mcrypt_enc_get_modes_name',
'mcrypt_enc_get_supported_key_sizes',
'mcrypt_enc_is_block_algorithm_mode',
'mcrypt_enc_is_block_algorithm',
'mcrypt_enc_is_block_mode',
'mcrypt_enc_self_test',
'mcrypt_encrypt',
'mcrypt_generic_deinit',
'mcrypt_generic_end',
'mcrypt_generic_init',
'mcrypt_generic',
'mcrypt_get_block_size',
'mcrypt_get_cipher_name',
'mcrypt_get_iv_size',
'mcrypt_get_key_size',
'mcrypt_list_algorithms',
'mcrypt_list_modes',
'mcrypt_module_close',
'mcrypt_module_get_algo_block_size',
'mcrypt_module_get_algo_key_size',
'mcrypt_module_get_supported_key_sizes',
'mcrypt_module_is_block_algorithm_mode',
'mcrypt_module_is_block_algorithm',
'mcrypt_module_is_block_mode',
'mcrypt_module_open',
'mcrypt_module_self_test',
'mcrypt_ofb',
'mdecrypt_generic'],
'Memcache': ['memcache_debug'],
'Mhash': ['mhash_count',
'mhash_get_block_size',
'mhash_get_hash_name',
'mhash_keygen_s2k',
'mhash'],
'Ming': ['ming_keypress',
'ming_setcubicthreshold',
'ming_setscale',
'ming_setswfcompression',
'ming_useconstants',
'ming_useswfversion'],
'Misc.': ['connection_aborted',
'connection_status',
'connection_timeout',
'constant',
'define',
'defined',
'die',
'eval',
'exit',
'get_browser',
'__halt_compiler',
'highlight_file',
'highlight_string',
'ignore_user_abort',
'pack',
'php_check_syntax',
'php_strip_whitespace',
'show_source',
'sleep',
'sys_getloadavg',
'time_nanosleep',
'time_sleep_until',
'uniqid',
'unpack',
'usleep'],
'Mongo': ['bson_decode', 'bson_encode'],
'Msession': ['msession_connect',
'msession_count',
'msession_create',
'msession_destroy',
'msession_disconnect',
'msession_find',
'msession_get_array',
'msession_get_data',
'msession_get',
'msession_inc',
'msession_list',
'msession_listvar',
'msession_lock',
'msession_plugin',
'msession_randstr',
'msession_set_array',
'msession_set_data',
'msession_set',
'msession_timeout',
'msession_uniq',
'msession_unlock'],
'Mssql': ['mssql_bind',
'mssql_close',
'mssql_connect',
'mssql_data_seek',
'mssql_execute',
'mssql_fetch_array',
'mssql_fetch_assoc',
'mssql_fetch_batch',
'mssql_fetch_field',
'mssql_fetch_object',
'mssql_fetch_row',
'mssql_field_length',
'mssql_field_name',
'mssql_field_seek',
'mssql_field_type',
'mssql_free_result',
'mssql_free_statement',
'mssql_get_last_message',
'mssql_guid_string',
'mssql_init',
'mssql_min_error_severity',
'mssql_min_message_severity',
'mssql_next_result',
'mssql_num_fields',
'mssql_num_rows',
'mssql_pconnect',
'mssql_query',
'mssql_result',
'mssql_rows_affected',
'mssql_select_db'],
'Multibyte String': ['mb_check_encoding',
'mb_convert_case',
'mb_convert_encoding',
'mb_convert_kana',
'mb_convert_variables',
'mb_decode_mimeheader',
'mb_decode_numericentity',
'mb_detect_encoding',
'mb_detect_order',
'mb_encode_mimeheader',
'mb_encode_numericentity',
'mb_encoding_aliases',
'mb_ereg_match',
'mb_ereg_replace',
'mb_ereg_search_getpos',
'mb_ereg_search_getregs',
'mb_ereg_search_init',
'mb_ereg_search_pos',
'mb_ereg_search_regs',
'mb_ereg_search_setpos',
'mb_ereg_search',
'mb_ereg',
'mb_eregi_replace',
'mb_eregi',
'mb_get_info',
'mb_http_input',
'mb_http_output',
'mb_internal_encoding',
'mb_language',
'mb_list_encodings',
'mb_output_handler',
'mb_parse_str',
'mb_preferred_mime_name',
'mb_regex_encoding',
'mb_regex_set_options',
'mb_send_mail',
'mb_split',
'mb_strcut',
'mb_strimwidth',
'mb_stripos',
'mb_stristr',
'mb_strlen',
'mb_strpos',
'mb_strrchr',
'mb_strrichr',
'mb_strripos',
'mb_strrpos',
'mb_strstr',
'mb_strtolower',
'mb_strtoupper',
'mb_strwidth',
'mb_substitute_character',
'mb_substr_count',
'mb_substr'],
'MySQL': ['mysql_affected_rows',
'mysql_client_encoding',
'mysql_close',
'mysql_connect',
'mysql_create_db',
'mysql_data_seek',
'mysql_db_name',
'mysql_db_query',
'mysql_drop_db',
'mysql_errno',
'mysql_error',
'mysql_escape_string',
'mysql_fetch_array',
'mysql_fetch_assoc',
'mysql_fetch_field',
'mysql_fetch_lengths',
'mysql_fetch_object',
'mysql_fetch_row',
'mysql_field_flags',
'mysql_field_len',
'mysql_field_name',
'mysql_field_seek',
'mysql_field_table',
'mysql_field_type',
'mysql_free_result',
'mysql_get_client_info',
'mysql_get_host_info',
'mysql_get_proto_info',
'mysql_get_server_info',
'mysql_info',
'mysql_insert_id',
'mysql_list_dbs',
'mysql_list_fields',
'mysql_list_processes',
'mysql_list_tables',
'mysql_num_fields',
'mysql_num_rows',
'mysql_pconnect',
'mysql_ping',
'mysql_query',
'mysql_real_escape_string',
'mysql_result',
'mysql_select_db',
'mysql_set_charset',
'mysql_stat',
'mysql_tablename',
'mysql_thread_id',
'mysql_unbuffered_query'],
'NSAPI': ['nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'],
'Ncurses': ['ncurses_addch',
'ncurses_addchnstr',
'ncurses_addchstr',
'ncurses_addnstr',
'ncurses_addstr',
'ncurses_assume_default_colors',
'ncurses_attroff',
'ncurses_attron',
'ncurses_attrset',
'ncurses_baudrate',
'ncurses_beep',
'ncurses_bkgd',
'ncurses_bkgdset',
'ncurses_border',
'ncurses_bottom_panel',
'ncurses_can_change_color',
'ncurses_cbreak',
'ncurses_clear',
'ncurses_clrtobot',
'ncurses_clrtoeol',
'ncurses_color_content',
'ncurses_color_set',
'ncurses_curs_set',
'ncurses_def_prog_mode',
'ncurses_def_shell_mode',
'ncurses_define_key',
'ncurses_del_panel',
'ncurses_delay_output',
'ncurses_delch',
'ncurses_deleteln',
'ncurses_delwin',
'ncurses_doupdate',
'ncurses_echo',
'ncurses_echochar',
'ncurses_end',
'ncurses_erase',
'ncurses_erasechar',
'ncurses_filter',
'ncurses_flash',
'ncurses_flushinp',
'ncurses_getch',
'ncurses_getmaxyx',
'ncurses_getmouse',
'ncurses_getyx',
'ncurses_halfdelay',
'ncurses_has_colors',
'ncurses_has_ic',
'ncurses_has_il',
'ncurses_has_key',
'ncurses_hide_panel',
'ncurses_hline',
'ncurses_inch',
'ncurses_init_color',
'ncurses_init_pair',
'ncurses_init',
'ncurses_insch',
'ncurses_insdelln',
'ncurses_insertln',
'ncurses_insstr',
'ncurses_instr',
'ncurses_isendwin',
'ncurses_keyok',
'ncurses_keypad',
'ncurses_killchar',
'ncurses_longname',
'ncurses_meta',
'ncurses_mouse_trafo',
'ncurses_mouseinterval',
'ncurses_mousemask',
'ncurses_move_panel',
'ncurses_move',
'ncurses_mvaddch',
'ncurses_mvaddchnstr',
'ncurses_mvaddchstr',
'ncurses_mvaddnstr',
'ncurses_mvaddstr',
'ncurses_mvcur',
'ncurses_mvdelch',
'ncurses_mvgetch',
'ncurses_mvhline',
'ncurses_mvinch',
'ncurses_mvvline',
'ncurses_mvwaddstr',
'ncurses_napms',
'ncurses_new_panel',
'ncurses_newpad',
'ncurses_newwin',
'ncurses_nl',
'ncurses_nocbreak',
'ncurses_noecho',
'ncurses_nonl',
'ncurses_noqiflush',
'ncurses_noraw',
'ncurses_pair_content',
'ncurses_panel_above',
'ncurses_panel_below',
'ncurses_panel_window',
'ncurses_pnoutrefresh',
'ncurses_prefresh',
'ncurses_putp',
'ncurses_qiflush',
'ncurses_raw',
'ncurses_refresh',
'ncurses_replace_panel',
'ncurses_reset_prog_mode',
'ncurses_reset_shell_mode',
'ncurses_resetty',
'ncurses_savetty',
'ncurses_scr_dump',
'ncurses_scr_init',
'ncurses_scr_restore',
'ncurses_scr_set',
'ncurses_scrl',
'ncurses_show_panel',
'ncurses_slk_attr',
'ncurses_slk_attroff',
'ncurses_slk_attron',
'ncurses_slk_attrset',
'ncurses_slk_clear',
'ncurses_slk_color',
'ncurses_slk_init',
'ncurses_slk_noutrefresh',
'ncurses_slk_refresh',
'ncurses_slk_restore',
'ncurses_slk_set',
'ncurses_slk_touch',
'ncurses_standend',
'ncurses_standout',
'ncurses_start_color',
'ncurses_termattrs',
'ncurses_termname',
'ncurses_timeout',
'ncurses_top_panel',
'ncurses_typeahead',
'ncurses_ungetch',
'ncurses_ungetmouse',
'ncurses_update_panels',
'ncurses_use_default_colors',
'ncurses_use_env',
'ncurses_use_extended_names',
'ncurses_vidattr',
'ncurses_vline',
'ncurses_waddch',
'ncurses_waddstr',
'ncurses_wattroff',
'ncurses_wattron',
'ncurses_wattrset',
'ncurses_wborder',
'ncurses_wclear',
'ncurses_wcolor_set',
'ncurses_werase',
'ncurses_wgetch',
'ncurses_whline',
'ncurses_wmouse_trafo',
'ncurses_wmove',
'ncurses_wnoutrefresh',
'ncurses_wrefresh',
'ncurses_wstandend',
'ncurses_wstandout',
'ncurses_wvline'],
'Network': ['checkdnsrr',
'closelog',
'define_syslog_variables',
'dns_check_record',
'dns_get_mx',
'dns_get_record',
'fsockopen',
'gethostbyaddr',
'gethostbyname',
'gethostbynamel'],
'Newt': ['newt_bell',
'newt_button_bar',
'newt_button',
'newt_centered_window',
'newt_checkbox_get_value',
'newt_checkbox_set_flags',
'newt_checkbox_set_value',
'newt_checkbox_tree_add_item',
'newt_checkbox_tree_find_item',
'newt_checkbox_tree_get_current',
'newt_checkbox_tree_get_entry_value',
'newt_checkbox_tree_get_multi_selection',
'newt_checkbox_tree_get_selection',
'newt_checkbox_tree_multi',
'newt_checkbox_tree_set_current',
'newt_checkbox_tree_set_entry_value',
'newt_checkbox_tree_set_entry',
'newt_checkbox_tree_set_width',
'newt_checkbox_tree',
'newt_checkbox',
'newt_clear_key_buffer'],
'OAuth': ['oauth_get_sbs', 'oauth_urlencode'],
'OCI8': ['oci_bind_array_by_name',
'oci_bind_by_name',
'oci_cancel',
'oci_close',
'oci_commit',
'oci_connect',
'oci_define_by_name',
'oci_error',
'oci_execute',
'oci_fetch_all',
'oci_fetch_array',
'oci_fetch_assoc',
'oci_fetch_object',
'oci_fetch_row',
'oci_fetch',
'oci_field_is_null',
'oci_field_name',
'oci_field_precision',
'oci_field_scale',
'oci_field_size',
'oci_field_type_raw',
'oci_field_type',
'oci_free_statement',
'oci_internal_debug',
'oci_lob_copy',
'oci_lob_is_equal',
'oci_new_collection',
'oci_new_connect',
'oci_new_cursor',
'oci_new_descriptor',
'oci_num_fields',
'oci_num_rows',
'oci_parse',
'oci_password_change',
'oci_pconnect',
'oci_result',
'oci_rollback',
'oci_server_version',
'oci_set_action',
'oci_set_client_identifier',
'oci_set_client_info',
'oci_set_edition',
'oci_set_module_name',
'oci_set_prefetch',
'oci_statement_type'],
'ODBC': ['odbc_autocommit',
'odbc_binmode',
'odbc_close_all',
'odbc_close',
'odbc_columnprivileges',
'odbc_columns',
'odbc_commit',
'odbc_connect',
'odbc_cursor',
'odbc_data_source',
'odbc_do',
'odbc_error',
'odbc_errormsg',
'odbc_exec',
'odbc_execute',
'odbc_fetch_array',
'odbc_fetch_into',
'odbc_fetch_object',
'odbc_fetch_row',
'odbc_field_len',
'odbc_field_name',
'odbc_field_num',
'odbc_field_precision',
'odbc_field_scale',
'odbc_field_type',
'odbc_foreignkeys',
'odbc_free_result',
'odbc_gettypeinfo',
'odbc_longreadlen',
'odbc_next_result',
'odbc_num_fields',
'odbc_num_rows',
'odbc_pconnect',
'odbc_prepare',
'odbc_primarykeys',
'odbc_procedurecolumns',
'odbc_procedures',
'odbc_result_all',
'odbc_result',
'odbc_rollback',
'odbc_setoption',
'odbc_specialcolumns',
'odbc_statistics',
'odbc_tableprivileges',
'odbc_tables'],
'Object Aggregation': ['aggregate_info',
'aggregate_methods_by_list',
'aggregate_methods_by_regexp'],
'Object overloading': ['overload'],
'OpenAL': ['openal_buffer_create',
'openal_buffer_data',
'openal_buffer_destroy',
'openal_buffer_get',
'openal_buffer_loadwav',
'openal_context_create',
'openal_context_current',
'openal_context_destroy',
'openal_context_process',
'openal_context_suspend',
'openal_device_close',
'openal_device_open',
'openal_listener_get',
'openal_listener_set',
'openal_source_create',
'openal_source_destroy',
'openal_source_get',
'openal_source_pause',
'openal_source_play',
'openal_source_rewind',
'openal_source_set',
'openal_source_stop',
'openal_stream'],
'OpenSSL': ['openssl_csr_export_to_file',
'openssl_csr_export',
'openssl_csr_get_public_key',
'openssl_csr_get_subject',
'openssl_csr_new',
'openssl_csr_sign',
'openssl_decrypt',
'openssl_dh_compute_key',
'openssl_digest',
'openssl_encrypt',
'openssl_error_string',
'openssl_free_key',
'openssl_get_cipher_methods',
'openssl_get_md_methods',
'openssl_get_privatekey',
'openssl_get_publickey',
'openssl_open',
'openssl_pkcs12_export_to_file',
'openssl_pkcs12_export',
'openssl_pkcs12_read',
'openssl_pkcs7_decrypt',
'openssl_pkcs7_encrypt',
'openssl_pkcs7_sign',
'openssl_pkcs7_verify',
'openssl_pkey_export_to_file',
'openssl_pkey_export',
'openssl_pkey_free',
'openssl_pkey_get_details',
'openssl_pkey_get_private',
'openssl_pkey_get_public',
'openssl_pkey_new',
'openssl_private_decrypt',
'openssl_private_encrypt',
'openssl_public_decrypt',
'openssl_public_encrypt',
'openssl_random_pseudo_bytes',
'openssl_seal',
'openssl_sign',
'openssl_verify',
'openssl_x509_check_private_key',
'openssl_x509_checkpurpose',
'openssl_x509_export_to_file',
'openssl_x509_export',
'openssl_x509_free',
'openssl_x509_parse',
'openssl_x509_read'],
'Output Control': ['flush',
'ob_clean',
'ob_end_clean',
'ob_end_flush',
'ob_flush',
'ob_get_clean',
'ob_get_contents',
'ob_get_flush',
'ob_get_length',
'ob_get_level',
'ob_get_status',
'ob_gzhandler',
'ob_implicit_flush',
'ob_list_handlers',
'ob_start',
'output_add_rewrite_var',
'output_reset_rewrite_vars'],
'Ovrimos SQL': ['ovrimos_close',
'ovrimos_commit',
'ovrimos_connect',
'ovrimos_cursor',
'ovrimos_exec',
'ovrimos_execute',
'ovrimos_fetch_into',
'ovrimos_fetch_row',
'ovrimos_field_len',
'ovrimos_field_name',
'ovrimos_field_num',
'ovrimos_field_type',
'ovrimos_free_result',
'ovrimos_longreadlen',
'ovrimos_num_fields',
'ovrimos_num_rows',
'ovrimos_prepare',
'ovrimos_result_all',
'ovrimos_result',
'ovrimos_rollback'],
'PCNTL': ['pcntl_alarm',
'pcntl_exec',
'pcntl_fork',
'pcntl_getpriority',
'pcntl_setpriority',
'pcntl_signal_dispatch',
'pcntl_signal',
'pcntl_sigprocmask',
'pcntl_sigtimedwait',
'pcntl_sigwaitinfo',
'pcntl_wait',
'pcntl_waitpid',
'pcntl_wexitstatus',
'pcntl_wifexited',
'pcntl_wifsignaled',
'pcntl_wifstopped',
'pcntl_wstopsig',
'pcntl_wtermsig'],
'PCRE': ['preg_filter',
'preg_grep',
'preg_last_error',
'preg_match_all',
'preg_match',
'preg_quote',
'preg_replace_callback',
'preg_replace',
'preg_split'],
'PDF': ['PDF_activate_item',
'PDF_add_annotation',
'PDF_add_bookmark',
'PDF_add_launchlink',
'PDF_add_locallink',
'PDF_add_nameddest',
'PDF_add_note',
'PDF_add_outline',
'PDF_add_pdflink',
'PDF_add_table_cell',
'PDF_add_textflow',
'PDF_add_thumbnail',
'PDF_add_weblink',
'PDF_arc',
'PDF_arcn',
'PDF_attach_file',
'PDF_begin_document',
'PDF_begin_font',
'PDF_begin_glyph',
'PDF_begin_item',
'PDF_begin_layer',
'PDF_begin_page_ext',
'PDF_begin_page',
'PDF_begin_pattern',
'PDF_begin_template_ext',
'PDF_begin_template',
'PDF_circle',
'PDF_clip',
'PDF_close_image',
'PDF_close_pdi_page',
'PDF_close_pdi',
'PDF_close',
'PDF_closepath_fill_stroke',
'PDF_closepath_stroke',
'PDF_closepath',
'PDF_concat',
'PDF_continue_text',
'PDF_create_3dview',
'PDF_create_action',
'PDF_create_annotation',
'PDF_create_bookmark',
'PDF_create_field',
'PDF_create_fieldgroup',
'PDF_create_gstate',
'PDF_create_pvf',
'PDF_create_textflow',
'PDF_curveto',
'PDF_define_layer',
'PDF_delete_pvf',
'PDF_delete_table',
'PDF_delete_textflow',
'PDF_delete',
'PDF_encoding_set_char',
'PDF_end_document',
'PDF_end_font',
'PDF_end_glyph',
'PDF_end_item',
'PDF_end_layer',
'PDF_end_page_ext',
'PDF_end_page',
'PDF_end_pattern',
'PDF_end_template',
'PDF_endpath',
'PDF_fill_imageblock',
'PDF_fill_pdfblock',
'PDF_fill_stroke',
'PDF_fill_textblock',
'PDF_fill',
'PDF_findfont',
'PDF_fit_image',
'PDF_fit_pdi_page',
'PDF_fit_table',
'PDF_fit_textflow',
'PDF_fit_textline',
'PDF_get_apiname',
'PDF_get_buffer',
'PDF_get_errmsg',
'PDF_get_errnum',
'PDF_get_font',
'PDF_get_fontname',
'PDF_get_fontsize',
'PDF_get_image_height',
'PDF_get_image_width',
'PDF_get_majorversion',
'PDF_get_minorversion',
'PDF_get_parameter',
'PDF_get_pdi_parameter',
'PDF_get_pdi_value',
'PDF_get_value',
'PDF_info_font',
'PDF_info_matchbox',
'PDF_info_table',
'PDF_info_textflow',
'PDF_info_textline',
'PDF_initgraphics',
'PDF_lineto',
'PDF_load_3ddata',
'PDF_load_font',
'PDF_load_iccprofile',
'PDF_load_image',
'PDF_makespotcolor',
'PDF_moveto',
'PDF_new',
'PDF_open_ccitt',
'PDF_open_file',
'PDF_open_gif',
'PDF_open_image_file',
'PDF_open_image',
'PDF_open_jpeg',
'PDF_open_memory_image',
'PDF_open_pdi_document',
'PDF_open_pdi_page',
'PDF_open_pdi',
'PDF_open_tiff',
'PDF_pcos_get_number',
'PDF_pcos_get_stream',
'PDF_pcos_get_string',
'PDF_place_image',
'PDF_place_pdi_page',
'PDF_process_pdi',
'PDF_rect',
'PDF_restore',
'PDF_resume_page',
'PDF_rotate',
'PDF_save',
'PDF_scale',
'PDF_set_border_color',
'PDF_set_border_dash',
'PDF_set_border_style',
'PDF_set_char_spacing',
'PDF_set_duration',
'PDF_set_gstate',
'PDF_set_horiz_scaling',
'PDF_set_info_author',
'PDF_set_info_creator',
'PDF_set_info_keywords',
'PDF_set_info_subject',
'PDF_set_info_title',
'PDF_set_info',
'PDF_set_layer_dependency',
'PDF_set_leading',
'PDF_set_parameter',
'PDF_set_text_matrix',
'PDF_set_text_pos',
'PDF_set_text_rendering',
'PDF_set_text_rise',
'PDF_set_value',
'PDF_set_word_spacing',
'PDF_setcolor',
'PDF_setdash',
'PDF_setdashpattern',
'PDF_setflat',
'PDF_setfont',
'PDF_setgray_fill',
'PDF_setgray_stroke',
'PDF_setgray',
'PDF_setlinecap',
'PDF_setlinejoin',
'PDF_setlinewidth',
'PDF_setmatrix',
'PDF_setmiterlimit',
'PDF_setpolydash',
'PDF_setrgbcolor_fill',
'PDF_setrgbcolor_stroke',
'PDF_setrgbcolor',
'PDF_shading_pattern',
'PDF_shading',
'PDF_shfill',
'PDF_show_boxed',
'PDF_show_xy',
'PDF_show',
'PDF_skew',
'PDF_stringwidth',
'PDF_stroke',
'PDF_suspend_page',
'PDF_translate',
'PDF_utf16_to_utf8',
'PDF_utf32_to_utf16',
'PDF_utf8_to_utf16'],
'PHP Options/Info': ['assert_options',
'assert',
'dl',
'extension_loaded',
'gc_collect_cycles',
'gc_disable',
'gc_enable',
'gc_enabled',
'get_cfg_var',
'get_current_user',
'get_defined_constants',
'get_extension_funcs',
'get_include_path',
'get_included_files',
'get_loaded_extensions',
'get_magic_quotes_gpc',
'get_magic_quotes_runtime',
'get_required_files',
'getenv',
'getlastmod',
'getmygid',
'getmyinode',
'getmypid',
'getmyuid',
'getopt',
'getrusage',
'ini_alter',
'ini_get_all',
'ini_get',
'ini_restore',
'ini_set',
'magic_quotes_runtime',
'memory_get_peak_usage',
'memory_get_usage',
'php_ini_loaded_file',
'php_ini_scanned_files',
'php_logo_guid',
'php_sapi_name',
'php_uname',
'phpcredits',
'phpinfo',
'phpversion',
'putenv',
'restore_include_path',
'set_include_path',
'set_magic_quotes_runtime',
'set_time_limit',
'sys_get_temp_dir',
'version_compare',
'zend_logo_guid',
'zend_thread_id',
'zend_version'],
'POSIX': ['posix_access',
'posix_ctermid',
'posix_errno',
'posix_get_last_error',
'posix_getcwd',
'posix_getegid',
'posix_geteuid',
'posix_getgid',
'posix_getgrgid',
'posix_getgrnam',
'posix_getgroups',
'posix_getlogin',
'posix_getpgid',
'posix_getpgrp',
'posix_getpid',
'posix_getppid',
'posix_getpwnam',
'posix_getpwuid',
'posix_getrlimit',
'posix_getsid',
'posix_getuid',
'posix_initgroups',
'posix_isatty',
'posix_kill',
'posix_mkfifo',
'posix_mknod',
'posix_setegid',
'posix_seteuid',
'posix_setgid',
'posix_setpgid',
'posix_setsid',
'posix_setuid',
'posix_strerror',
'posix_times',
'posix_ttyname',
'posix_uname'],
'POSIX Regex': ['ereg_replace',
'ereg',
'eregi_replace',
'eregi',
'split',
'spliti',
'sql_regcase'],
'PS': ['ps_add_bookmark',
'ps_add_launchlink',
'ps_add_locallink',
'ps_add_note',
'ps_add_pdflink',
'ps_add_weblink',
'ps_arc',
'ps_arcn',
'ps_begin_page',
'ps_begin_pattern',
'ps_begin_template',
'ps_circle',
'ps_clip',
'ps_close_image',
'ps_close',
'ps_closepath_stroke',
'ps_closepath',
'ps_continue_text',
'ps_curveto',
'ps_delete',
'ps_end_page',
'ps_end_pattern',
'ps_end_template',
'ps_fill_stroke',
'ps_fill',
'ps_findfont',
'ps_get_buffer',
'ps_get_parameter',
'ps_get_value',
'ps_hyphenate',
'ps_include_file',
'ps_lineto',
'ps_makespotcolor',
'ps_moveto',
'ps_new',
'ps_open_file',
'ps_open_image_file',
'ps_open_image',
'ps_open_memory_image',
'ps_place_image',
'ps_rect',
'ps_restore',
'ps_rotate',
'ps_save',
'ps_scale',
'ps_set_border_color',
'ps_set_border_dash',
'ps_set_border_style',
'ps_set_info',
'ps_set_parameter',
'ps_set_text_pos',
'ps_set_value',
'ps_setcolor',
'ps_setdash',
'ps_setflat',
'ps_setfont',
'ps_setgray',
'ps_setlinecap',
'ps_setlinejoin',
'ps_setlinewidth',
'ps_setmiterlimit',
'ps_setoverprintmode',
'ps_setpolydash',
'ps_shading_pattern',
'ps_shading',
'ps_shfill',
'ps_show_boxed',
'ps_show_xy2',
'ps_show_xy',
'ps_show2',
'ps_show',
'ps_string_geometry',
'ps_stringwidth',
'ps_stroke',
'ps_symbol_name',
'ps_symbol_width',
'ps_symbol',
'ps_translate'],
'Paradox': ['px_close',
'px_create_fp',
'px_date2string',
'px_delete_record',
'px_delete',
'px_get_field',
'px_get_info',
'px_get_parameter',
'px_get_record',
'px_get_schema',
'px_get_value',
'px_insert_record',
'px_new',
'px_numfields',
'px_numrecords',
'px_open_fp',
'px_put_record',
'px_retrieve_record',
'px_set_blob_file',
'px_set_parameter',
'px_set_tablename',
'px_set_targetencoding',
'px_set_value',
'px_timestamp2string',
'px_update_record'],
'Parsekit': ['parsekit_compile_file',
'parsekit_compile_string',
'parsekit_func_arginfo'],
'PostgreSQL': ['pg_affected_rows',
'pg_cancel_query',
'pg_client_encoding',
'pg_close',
'pg_connect',
'pg_connection_busy',
'pg_connection_reset',
'pg_connection_status',
'pg_convert',
'pg_copy_from',
'pg_copy_to',
'pg_dbname',
'pg_delete',
'pg_end_copy',
'pg_escape_bytea',
'pg_escape_string',
'pg_execute',
'pg_fetch_all_columns',
'pg_fetch_all',
'pg_fetch_array',
'pg_fetch_assoc',
'pg_fetch_object',
'pg_fetch_result',
'pg_fetch_row',
'pg_field_is_null',
'pg_field_name',
'pg_field_num',
'pg_field_prtlen',
'pg_field_size',
'pg_field_table',
'pg_field_type_oid',
'pg_field_type',
'pg_free_result',
'pg_get_notify',
'pg_get_pid',
'pg_get_result',
'pg_host',
'pg_insert',
'pg_last_error',
'pg_last_notice',
'pg_last_oid',
'pg_lo_close',
'pg_lo_create',
'pg_lo_export',
'pg_lo_import',
'pg_lo_open',
'pg_lo_read_all',
'pg_lo_read',
'pg_lo_seek',
'pg_lo_tell',
'pg_lo_unlink',
'pg_lo_write',
'pg_meta_data',
'pg_num_fields',
'pg_num_rows',
'pg_options',
'pg_parameter_status',
'pg_pconnect',
'pg_ping',
'pg_port',
'pg_prepare'],
'Printer': ['printer_abort',
'printer_close',
'printer_create_brush',
'printer_create_dc',
'printer_create_font',
'printer_create_pen',
'printer_delete_brush',
'printer_delete_dc',
'printer_delete_font',
'printer_delete_pen',
'printer_draw_bmp',
'printer_draw_chord',
'printer_draw_elipse',
'printer_draw_line',
'printer_draw_pie',
'printer_draw_rectangle',
'printer_draw_roundrect',
'printer_draw_text',
'printer_end_doc',
'printer_end_page',
'printer_get_option',
'printer_list',
'printer_logical_fontheight',
'printer_open',
'printer_select_brush',
'printer_select_font',
'printer_select_pen',
'printer_set_option',
'printer_start_doc',
'printer_start_page',
'printer_write'],
'Program execution': ['escapeshellarg',
'escapeshellcmd',
'exec',
'passthru',
'proc_close',
'proc_get_status',
'proc_nice',
'proc_open',
'proc_terminate',
'shell_exec',
'system'],
'Pspell': ['pspell_add_to_personal',
'pspell_add_to_session',
'pspell_check',
'pspell_clear_session',
'pspell_config_create',
'pspell_config_data_dir',
'pspell_config_dict_dir',
'pspell_config_ignore',
'pspell_config_mode',
'pspell_config_personal',
'pspell_config_repl',
'pspell_config_runtogether',
'pspell_config_save_repl'],
'RPM Reader': ['rpm_close',
'rpm_get_tag',
'rpm_is_valid',
'rpm_open',
'rpm_version'],
'RRD': ['rrd_create',
'rrd_error',
'rrd_fetch',
'rrd_first',
'rrd_graph',
'rrd_info',
'rrd_last',
'rrd_lastupdate',
'rrd_restore',
'rrd_tune',
'rrd_update',
'rrd_xport'],
'Radius': ['radius_acct_open',
'radius_add_server',
'radius_auth_open',
'radius_close',
'radius_config',
'radius_create_request',
'radius_cvt_addr',
'radius_cvt_int',
'radius_cvt_string',
'radius_demangle_mppe_key',
'radius_demangle',
'radius_get_attr',
'radius_get_vendor_attr',
'radius_put_addr',
'radius_put_attr',
'radius_put_int',
'radius_put_string',
'radius_put_vendor_addr',
'radius_put_vendor_attr',
'radius_put_vendor_int',
'radius_put_vendor_string',
'radius_request_authenticator',
'radius_send_request',
'radius_server_secret',
'radius_strerror'],
'Rar': ['rar_wrapper_cache_stats'],
'Readline': ['readline_add_history',
'readline_callback_handler_install',
'readline_callback_handler_remove',
'readline_callback_read_char',
'readline_clear_history',
'readline_completion_function',
'readline_info',
'readline_list_history',
'readline_on_new_line',
'readline_read_history',
'readline_redisplay',
'readline_write_history',
'readline'],
'Recode': ['recode_file', 'recode_string', 'recode'],
'SNMP': ['snmp_get_quick_print',
'snmp_get_valueretrieval',
'snmp_read_mib',
'snmp_set_enum_print',
'snmp_set_oid_numeric_print',
'snmp_set_oid_output_format',
'snmp_set_quick_print',
'snmp_set_valueretrieval',
'snmp2_get',
'snmp2_getnext',
'snmp2_real_walk',
'snmp2_set',
'snmp2_walk',
'snmp3_get',
'snmp3_getnext',
'snmp3_real_walk',
'snmp3_set',
'snmp3_walk',
'snmpget',
'snmpgetnext',
'snmprealwalk',
'snmpset',
'snmpwalk',
'snmpwalkoid'],
'SOAP': ['is_soap_fault', 'use_soap_error_handler'],
'SPL': ['class_implements',
'class_parents',
'iterator_apply',
'iterator_count',
'iterator_to_array',
'spl_autoload_call',
'spl_autoload_extensions',
'spl_autoload_functions',
'spl_autoload_register',
'spl_autoload_unregister',
'spl_autoload',
'spl_classes',
'spl_object_hash'],
'SPPLUS': ['calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'],
'SQLite': ['sqlite_array_query', 'sqlite_busy_timeout', 'sqlite_changes'],
'SSH2': ['ssh2_auth_hostbased_file',
'ssh2_auth_none',
'ssh2_auth_password',
'ssh2_auth_pubkey_file',
'ssh2_connect',
'ssh2_exec',
'ssh2_fetch_stream',
'ssh2_fingerprint',
'ssh2_methods_negotiated',
'ssh2_publickey_add',
'ssh2_publickey_init',
'ssh2_publickey_list',
'ssh2_publickey_remove',
'ssh2_scp_recv',
'ssh2_scp_send',
'ssh2_sftp_lstat',
'ssh2_sftp_mkdir',
'ssh2_sftp_readlink',
'ssh2_sftp_realpath',
'ssh2_sftp_rename',
'ssh2_sftp_rmdir',
'ssh2_sftp_stat',
'ssh2_sftp_symlink',
'ssh2_sftp_unlink',
'ssh2_sftp',
'ssh2_shell',
'ssh2_tunnel'],
'SVN': ['svn_add',
'svn_auth_get_parameter',
'svn_auth_set_parameter',
'svn_blame',
'svn_cat',
'svn_checkout',
'svn_cleanup',
'svn_client_version',
'svn_commit',
'svn_delete',
'svn_diff',
'svn_export',
'svn_fs_abort_txn',
'svn_fs_apply_text',
'svn_fs_begin_txn2',
'svn_fs_change_node_prop',
'svn_fs_check_path',
'svn_fs_contents_changed',
'svn_fs_copy',
'svn_fs_delete',
'svn_fs_dir_entries',
'svn_fs_file_contents',
'svn_fs_file_length',
'svn_fs_is_dir',
'svn_fs_is_file',
'svn_fs_make_dir',
'svn_fs_make_file',
'svn_fs_node_created_rev',
'svn_fs_node_prop',
'svn_fs_props_changed',
'svn_fs_revision_prop',
'svn_fs_revision_root',
'svn_fs_txn_root',
'svn_fs_youngest_rev',
'svn_import',
'svn_log',
'svn_ls',
'svn_mkdir',
'svn_repos_create',
'svn_repos_fs_begin_txn_for_commit',
'svn_repos_fs_commit_txn',
'svn_repos_fs',
'svn_repos_hotcopy',
'svn_repos_open',
'svn_repos_recover',
'svn_revert',
'svn_status',
'svn_update'],
'SWF': ['swf_actiongeturl',
'swf_actiongotoframe',
'swf_actiongotolabel',
'swf_actionnextframe',
'swf_actionplay',
'swf_actionprevframe',
'swf_actionsettarget',
'swf_actionstop',
'swf_actiontogglequality',
'swf_actionwaitforframe',
'swf_addbuttonrecord',
'swf_addcolor',
'swf_closefile',
'swf_definebitmap',
'swf_definefont',
'swf_defineline',
'swf_definepoly',
'swf_definerect',
'swf_definetext',
'swf_endbutton',
'swf_enddoaction',
'swf_endshape',
'swf_endsymbol',
'swf_fontsize',
'swf_fontslant',
'swf_fonttracking',
'swf_getbitmapinfo',
'swf_getfontinfo',
'swf_getframe',
'swf_labelframe',
'swf_lookat',
'swf_modifyobject',
'swf_mulcolor',
'swf_nextid',
'swf_oncondition',
'swf_openfile',
'swf_ortho2',
'swf_ortho',
'swf_perspective',
'swf_placeobject',
'swf_polarview',
'swf_popmatrix',
'swf_posround',
'swf_pushmatrix',
'swf_removeobject',
'swf_rotate',
'swf_scale',
'swf_setfont',
'swf_setframe',
'swf_shapearc',
'swf_shapecurveto3',
'swf_shapecurveto',
'swf_shapefillbitmapclip',
'swf_shapefillbitmaptile',
'swf_shapefilloff',
'swf_shapefillsolid',
'swf_shapelinesolid',
'swf_shapelineto',
'swf_shapemoveto',
'swf_showframe',
'swf_startbutton',
'swf_startdoaction',
'swf_startshape',
'swf_startsymbol',
'swf_textwidth',
'swf_translate',
'swf_viewport'],
'Semaphore': ['ftok',
'msg_get_queue',
'msg_queue_exists',
'msg_receive',
'msg_remove_queue',
'msg_send',
'msg_set_queue',
'msg_stat_queue',
'sem_acquire',
'sem_get',
'sem_release',
'sem_remove',
'shm_attach',
'shm_detach',
'shm_get_var',
'shm_has_var',
'shm_put_var',
'shm_remove_var',
'shm_remove'],
'Session': ['session_cache_expire',
'session_cache_limiter',
'session_commit',
'session_decode',
'session_destroy',
'session_encode',
'session_get_cookie_params',
'session_id',
'session_is_registered',
'session_module_name',
'session_name',
'session_regenerate_id',
'session_register',
'session_save_path',
'session_set_cookie_params',
'session_set_save_handler',
'session_start',
'session_unregister',
'session_unset',
'session_write_close'],
'Session PgSQL': ['session_pgsql_add_error',
'session_pgsql_get_error',
'session_pgsql_get_field',
'session_pgsql_reset',
'session_pgsql_set_field',
'session_pgsql_status'],
'Shared Memory': ['shmop_close',
'shmop_delete',
'shmop_open',
'shmop_read',
'shmop_size',
'shmop_write'],
'SimpleXML': ['simplexml_import_dom',
'simplexml_load_file',
'simplexml_load_string'],
'Socket': ['socket_accept',
'socket_bind',
'socket_clear_error',
'socket_close',
'socket_connect',
'socket_create_listen',
'socket_create_pair',
'socket_create',
'socket_get_option',
'socket_getpeername',
'socket_getsockname',
'socket_last_error',
'socket_listen',
'socket_read',
'socket_recv',
'socket_recvfrom',
'socket_select',
'socket_send',
'socket_sendto',
'socket_set_block',
'socket_set_nonblock',
'socket_set_option',
'socket_shutdown',
'socket_strerror',
'socket_write'],
'Solr': ['solr_get_version'],
'Statistic': ['stats_absolute_deviation',
'stats_cdf_beta',
'stats_cdf_binomial',
'stats_cdf_cauchy',
'stats_cdf_chisquare',
'stats_cdf_exponential',
'stats_cdf_f',
'stats_cdf_gamma',
'stats_cdf_laplace',
'stats_cdf_logistic',
'stats_cdf_negative_binomial',
'stats_cdf_noncentral_chisquare',
'stats_cdf_noncentral_f',
'stats_cdf_poisson',
'stats_cdf_t',
'stats_cdf_uniform',
'stats_cdf_weibull',
'stats_covariance',
'stats_den_uniform',
'stats_dens_beta',
'stats_dens_cauchy',
'stats_dens_chisquare',
'stats_dens_exponential',
'stats_dens_f',
'stats_dens_gamma',
'stats_dens_laplace',
'stats_dens_logistic',
'stats_dens_negative_binomial',
'stats_dens_normal',
'stats_dens_pmf_binomial',
'stats_dens_pmf_hypergeometric',
'stats_dens_pmf_poisson',
'stats_dens_t',
'stats_dens_weibull',
'stats_harmonic_mean',
'stats_kurtosis',
'stats_rand_gen_beta',
'stats_rand_gen_chisquare',
'stats_rand_gen_exponential',
'stats_rand_gen_f',
'stats_rand_gen_funiform',
'stats_rand_gen_gamma',
'stats_rand_gen_ibinomial_negative',
'stats_rand_gen_ibinomial',
'stats_rand_gen_int',
'stats_rand_gen_ipoisson',
'stats_rand_gen_iuniform',
'stats_rand_gen_noncenral_chisquare',
'stats_rand_gen_noncentral_f',
'stats_rand_gen_noncentral_t',
'stats_rand_gen_normal',
'stats_rand_gen_t',
'stats_rand_get_seeds',
'stats_rand_phrase_to_seeds',
'stats_rand_ranf',
'stats_rand_setall',
'stats_skew',
'stats_standard_deviation',
'stats_stat_binomial_coef',
'stats_stat_correlation',
'stats_stat_gennch',
'stats_stat_independent_t',
'stats_stat_innerproduct',
'stats_stat_noncentral_t',
'stats_stat_paired_t',
'stats_stat_percentile',
'stats_stat_powersum',
'stats_variance'],
'Stomp': ['stomp_connect_error', 'stomp_version'],
'Stream': ['set_socket_blocking',
'stream_bucket_append',
'stream_bucket_make_writeable',
'stream_bucket_new',
'stream_bucket_prepend',
'stream_context_create',
'stream_context_get_default',
'stream_context_get_options',
'stream_context_get_params',
'stream_context_set_default',
'stream_context_set_option',
'stream_context_set_params',
'stream_copy_to_stream',
'stream_encoding',
'stream_filter_append',
'stream_filter_prepend',
'stream_filter_register',
'stream_filter_remove',
'stream_get_contents',
'stream_get_filters',
'stream_get_line',
'stream_get_meta_data',
'stream_get_transports',
'stream_get_wrappers',
'stream_is_local',
'stream_notification_callback',
'stream_register_wrapper',
'stream_resolve_include_path',
'stream_select'],
'String': ['addcslashes',
'addslashes',
'bin2hex',
'chop',
'chr',
'chunk_split',
'convert_cyr_string',
'convert_uudecode',
'convert_uuencode',
'count_chars',
'crc32',
'crypt',
'echo',
'explode',
'fprintf',
'get_html_translation_table',
'hebrev',
'hebrevc',
'html_entity_decode',
'htmlentities',
'htmlspecialchars_decode',
'htmlspecialchars',
'implode',
'join',
'lcfirst',
'levenshtein',
'localeconv',
'ltrim',
'md5_file',
'md5',
'metaphone',
'money_format',
'nl_langinfo',
'nl2br',
'number_format',
'ord',
'parse_str',
'print',
'printf',
'quoted_printable_decode',
'quoted_printable_encode',
'quotemeta',
'rtrim',
'setlocale',
'sha1_file',
'sha1',
'similar_text',
'soundex',
'sprintf',
'sscanf',
'str_getcsv',
'str_ireplace',
'str_pad',
'str_repeat',
'str_replace',
'str_rot13',
'str_shuffle',
'str_split',
'str_word_count',
'strcasecmp',
'strchr',
'strcmp',
'strcoll',
'strcspn',
'strip_tags',
'stripcslashes',
'stripos',
'stripslashes',
'stristr',
'strlen',
'strnatcasecmp',
'strnatcmp',
'strncasecmp',
'strncmp',
'strpbrk',
'strpos',
'strrchr',
'strrev',
'strripos',
'strrpos',
'strspn'],
'Sybase': ['sybase_affected_rows',
'sybase_close',
'sybase_connect',
'sybase_data_seek',
'sybase_deadlock_retry_count',
'sybase_fetch_array',
'sybase_fetch_assoc',
'sybase_fetch_field',
'sybase_fetch_object',
'sybase_fetch_row',
'sybase_field_seek',
'sybase_free_result',
'sybase_get_last_message',
'sybase_min_client_severity',
'sybase_min_error_severity',
'sybase_min_message_severity',
'sybase_min_server_severity',
'sybase_num_fields',
'sybase_num_rows',
'sybase_pconnect',
'sybase_query',
'sybase_result',
'sybase_select_db',
'sybase_set_message_handler',
'sybase_unbuffered_query'],
'TCP': ['tcpwrap_check'],
'Tidy': ['ob_tidyhandler',
'tidy_access_count',
'tidy_config_count',
'tidy_error_count',
'tidy_get_error_buffer',
'tidy_get_output',
'tidy_load_config',
'tidy_reset_config',
'tidy_save_config',
'tidy_set_encoding',
'tidy_setopt',
'tidy_warning_count'],
'Tokenizer': ['token_get_all', 'token_name'],
'URL': ['base64_decode',
'base64_encode',
'get_headers',
'get_meta_tags',
'http_build_query',
'parse_url',
'rawurldecode',
'rawurlencode',
'urldecode',
'urlencode'],
'Variable handling': ['debug_zval_dump',
'doubleval',
'empty',
'floatval',
'get_defined_vars',
'get_resource_type',
'gettype',
'import_request_variables',
'intval',
'is_array',
'is_bool',
'is_callable',
'is_double',
'is_float',
'is_int',
'is_integer',
'is_long',
'is_null',
'is_numeric',
'is_object',
'is_real',
'is_resource',
'is_scalar',
'is_string',
'isset',
'print_r',
'serialize',
'settype',
'strval',
'unserialize',
'unset',
'var_dump',
'var_export'],
'W32api': ['w32api_deftype',
'w32api_init_dtype',
'w32api_invoke_function',
'w32api_register_function',
'w32api_set_call_method'],
'WDDX': ['wddx_add_vars',
'wddx_deserialize',
'wddx_packet_end',
'wddx_packet_start',
'wddx_serialize_value',
'wddx_serialize_vars',
'wddx_unserialize'],
'WinCache': ['wincache_fcache_fileinfo',
'wincache_fcache_meminfo',
'wincache_lock',
'wincache_ocache_fileinfo',
'wincache_ocache_meminfo',
'wincache_refresh_if_changed',
'wincache_rplist_fileinfo',
'wincache_rplist_meminfo',
'wincache_scache_info',
'wincache_scache_meminfo',
'wincache_ucache_add',
'wincache_ucache_cas',
'wincache_ucache_clear',
'wincache_ucache_dec',
'wincache_ucache_delete',
'wincache_ucache_exists',
'wincache_ucache_get',
'wincache_ucache_inc',
'wincache_ucache_info',
'wincache_ucache_meminfo',
'wincache_ucache_set',
'wincache_unlock'],
'XML Parser': ['utf8_decode'],
'XML-RPC': ['xmlrpc_decode_request',
'xmlrpc_decode',
'xmlrpc_encode_request',
'xmlrpc_encode',
'xmlrpc_get_type',
'xmlrpc_is_fault',
'xmlrpc_parse_method_descriptions',
'xmlrpc_server_add_introspection_data',
'xmlrpc_server_call_method',
'xmlrpc_server_create',
'xmlrpc_server_destroy',
'xmlrpc_server_register_introspection_callback',
'xmlrpc_server_register_method',
'xmlrpc_set_type'],
'XSLT (PHP4)': ['xslt_backend_info',
'xslt_backend_name',
'xslt_backend_version',
'xslt_create',
'xslt_errno',
'xslt_error',
'xslt_free',
'xslt_getopt',
'xslt_process',
'xslt_set_base',
'xslt_set_encoding',
'xslt_set_error_handler',
'xslt_set_log',
'xslt_set_object',
'xslt_set_sax_handler',
'xslt_set_sax_handlers',
'xslt_set_scheme_handler',
'xslt_set_scheme_handlers',
'xslt_setopt'],
'YAZ': ['yaz_addinfo',
'yaz_ccl_conf',
'yaz_ccl_parse',
'yaz_close',
'yaz_connect',
'yaz_database',
'yaz_element',
'yaz_errno',
'yaz_error',
'yaz_es_result',
'yaz_es',
'yaz_get_option',
'yaz_hits',
'yaz_itemorder',
'yaz_present',
'yaz_range',
'yaz_record',
'yaz_scan_result',
'yaz_scan',
'yaz_schema',
'yaz_search',
'yaz_set_option',
'yaz_sort',
'yaz_syntax',
'yaz_wait'],
'YP/NIS': ['yp_all',
'yp_cat',
'yp_err_string',
'yp_errno',
'yp_first',
'yp_get_default_domain',
'yp_master',
'yp_match',
'yp_next',
'yp_order'],
'Yaml': ['yaml_emit_file',
'yaml_emit',
'yaml_parse_file',
'yaml_parse_url',
'yaml_parse'],
'Zip': ['zip_close',
'zip_entry_close',
'zip_entry_compressedsize',
'zip_entry_compressionmethod',
'zip_entry_filesize',
'zip_entry_name',
'zip_entry_open',
'zip_entry_read',
'zip_open',
'zip_read'],
'Zlib': ['gzclose',
'gzcompress',
'gzdecode',
'gzdeflate',
'gzencode',
'gzeof',
'gzfile',
'gzgetc',
'gzgets',
'gzgetss',
'gzinflate',
'gzopen',
'gzpassthru',
'gzputs',
'gzread',
'gzrewind',
'gzseek',
'gztell',
'gzuncompress',
'gzwrite',
'readgzfile',
'zlib_get_coding_type'],
'bcompiler': ['bcompiler_load_exe',
'bcompiler_load',
'bcompiler_parse_class',
'bcompiler_read',
'bcompiler_write_class',
'bcompiler_write_constant',
'bcompiler_write_exe_footer',
'bcompiler_write_file',
'bcompiler_write_footer',
'bcompiler_write_function',
'bcompiler_write_functions_from_file',
'bcompiler_write_header',
'bcompiler_write_included_filename'],
'cURL': ['curl_close',
'curl_copy_handle',
'curl_errno',
'curl_error',
'curl_exec',
'curl_getinfo',
'curl_init',
'curl_multi_add_handle',
'curl_multi_close',
'curl_multi_exec',
'curl_multi_getcontent',
'curl_multi_info_read',
'curl_multi_init',
'curl_multi_remove_handle',
'curl_multi_select',
'curl_setopt_array',
'curl_setopt',
'curl_version'],
'chdb': ['chdb_create'],
'dBase': ['dbase_add_record',
'dbase_close',
'dbase_create',
'dbase_delete_record',
'dbase_get_header_info',
'dbase_get_record_with_names',
'dbase_get_record',
'dbase_numfields',
'dbase_numrecords',
'dbase_open',
'dbase_pack',
'dbase_replace_record'],
'dbx': ['dbx_close',
'dbx_compare',
'dbx_connect',
'dbx_error',
'dbx_escape_string',
'dbx_fetch_row'],
'filePro': ['filepro_fieldcount',
'filepro_fieldname',
'filepro_fieldtype',
'filepro_fieldwidth',
'filepro_retrieve',
'filepro_rowcount',
'filepro'],
'iconv': ['iconv_get_encoding',
'iconv_mime_decode_headers',
'iconv_mime_decode',
'iconv_mime_encode',
'iconv_set_encoding',
'iconv_strlen',
'iconv_strpos',
'iconv_strrpos',
'iconv_substr',
'iconv',
'ob_iconv_handler'],
'inclued': ['inclued_get_data'],
'intl': ['intl_error_name',
'intl_get_error_code',
'intl_get_error_message',
'intl_is_failure'],
'libxml': ['libxml_clear_errors',
'libxml_disable_entity_loader',
'libxml_get_errors',
'libxml_get_last_error',
'libxml_set_streams_context',
'libxml_use_internal_errors'],
'mSQL': ['msql_affected_rows',
'msql_close',
'msql_connect',
'msql_create_db',
'msql_createdb',
'msql_data_seek',
'msql_db_query',
'msql_dbname',
'msql_drop_db',
'msql_error',
'msql_fetch_array',
'msql_fetch_field',
'msql_fetch_object',
'msql_fetch_row',
'msql_field_flags',
'msql_field_len',
'msql_field_name',
'msql_field_seek',
'msql_field_table',
'msql_field_type',
'msql_fieldflags',
'msql_fieldlen',
'msql_fieldname',
'msql_fieldtable',
'msql_fieldtype',
'msql_free_result',
'msql_list_dbs',
'msql_list_fields',
'msql_list_tables',
'msql_num_fields',
'msql_num_rows',
'msql_numfields',
'msql_numrows',
'msql_pconnect',
'msql_query',
'msql_regcase',
'msql_result',
'msql_select_db',
'msql_tablename',
'msql'],
'mnoGoSearch': ['udm_add_search_limit',
'udm_alloc_agent_array',
'udm_alloc_agent',
'udm_api_version',
'udm_cat_list',
'udm_cat_path',
'udm_check_charset',
'udm_check_stored',
'udm_clear_search_limits',
'udm_close_stored',
'udm_crc32',
'udm_errno',
'udm_error',
'udm_find',
'udm_free_agent',
'udm_free_ispell_data',
'udm_free_res',
'udm_get_doc_count',
'udm_get_res_field',
'udm_get_res_param',
'udm_hash32',
'udm_load_ispell_data',
'udm_open_stored',
'udm_set_agent_param'],
'mqseries': ['mqseries_back',
'mqseries_begin',
'mqseries_close',
'mqseries_cmit',
'mqseries_conn',
'mqseries_connx',
'mqseries_disc',
'mqseries_get',
'mqseries_inq',
'mqseries_open',
'mqseries_put1',
'mqseries_put',
'mqseries_set',
'mqseries_strerror'],
'mysqlnd_qc': ['mysqlnd_qc_change_handler',
'mysqlnd_qc_clear_cache',
'mysqlnd_qc_get_cache_info',
'mysqlnd_qc_get_core_stats',
'mysqlnd_qc_get_handler',
'mysqlnd_qc_get_query_trace_log',
'mysqlnd_qc_set_user_handlers'],
'qtdom': ['qdom_error', 'qdom_tree'],
'runkit': ['runkit_class_adopt',
'runkit_class_emancipate',
'runkit_constant_add',
'runkit_constant_redefine',
'runkit_constant_remove',
'runkit_function_add',
'runkit_function_copy',
'runkit_function_redefine',
'runkit_function_remove',
'runkit_function_rename',
'runkit_import',
'runkit_lint_file',
'runkit_lint',
'runkit_method_add',
'runkit_method_copy',
'runkit_method_redefine',
'runkit_method_remove',
'runkit_method_rename',
'runkit_return_value_used',
'runkit_sandbox_output_handler',
'runkit_superglobals'],
'ssdeep': ['ssdeep_fuzzy_compare',
'ssdeep_fuzzy_hash_filename',
'ssdeep_fuzzy_hash'],
'vpopmail': ['vpopmail_add_alias_domain_ex',
'vpopmail_add_alias_domain',
'vpopmail_add_domain_ex',
'vpopmail_add_domain',
'vpopmail_add_user',
'vpopmail_alias_add',
'vpopmail_alias_del_domain',
'vpopmail_alias_del',
'vpopmail_alias_get_all',
'vpopmail_alias_get',
'vpopmail_auth_user',
'vpopmail_del_domain_ex',
'vpopmail_del_domain',
'vpopmail_del_user',
'vpopmail_error',
'vpopmail_passwd',
'vpopmail_set_user_quota'],
'win32ps': ['win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'],
'win32service': ['win32_continue_service',
'win32_create_service',
'win32_delete_service',
'win32_get_last_control_message',
'win32_pause_service',
'win32_query_service_status',
'win32_set_service_status',
'win32_start_service_ctrl_dispatcher',
'win32_start_service',
'win32_stop_service'],
'xattr': ['xattr_get',
'xattr_list',
'xattr_remove',
'xattr_set',
'xattr_supported'],
'xdiff': ['xdiff_file_bdiff_size',
'xdiff_file_bdiff',
'xdiff_file_bpatch',
'xdiff_file_diff_binary',
'xdiff_file_diff',
'xdiff_file_merge3',
'xdiff_file_patch_binary',
'xdiff_file_patch',
'xdiff_file_rabdiff',
'xdiff_string_bdiff_size',
'xdiff_string_bdiff',
'xdiff_string_bpatch',
'xdiff_string_diff_binary',
'xdiff_string_diff',
'xdiff_string_merge3',
'xdiff_string_patch_binary',
'xdiff_string_patch',
'xdiff_string_rabdiff']}
if __name__ == '__main__':
import glob
import os
import pprint
import re
import shutil
import tarfile
import urllib.request, urllib.parse, urllib.error
PHP_MANUAL_URL = 'http://us3.php.net/distributions/manual/php_manual_en.tar.gz'
PHP_MANUAL_DIR = './php-chunked-xhtml/'
PHP_REFERENCE_GLOB = 'ref.*'
PHP_FUNCTION_RE = r'<a href="function\..*?\.html">(.*?)</a>'
PHP_MODULE_RE = r'<title>(.*?) Functions</title>'
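# Illustrative lines these patterns are meant to match (editor's sketch,
# derived from the regexes above rather than from an actual manual page):
#
#   <a href="function.strlen.html">strlen</a>    matched by PHP_FUNCTION_RE
#   <title>String Functions</title>              matched by PHP_MODULE_RE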
def get_php_functions():
function_re = re.compile(PHP_FUNCTION_RE)
module_re = re.compile(PHP_MODULE_RE)
modules = {}
for file in get_php_references():
module = ''
for line in open(file):
if not module:
search = module_re.search(line)
if search:
module = search.group(1)
modules[module] = []
elif '<h2>Table of Contents</h2>' in line:
for match in function_re.finditer(line):
fn = match.group(1)
if '->' not in fn and '::' not in fn:
modules[module].append(fn)
# These are dummy manual pages, not actual functions
if module == 'PHP Options/Info':
modules[module].remove('main')
elif module == 'Filesystem':
modules[module].remove('delete')
if not modules[module]:
del modules[module]
break
return modules
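# The mapping returned above mirrors the shape of MODULES, e.g. (sketch):
#
#   {'String': ['addcslashes', 'addslashes', ...],
#    'URL': ['base64_decode', 'base64_encode', ...]}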
def get_php_references():
download = urllib.request.urlretrieve(PHP_MANUAL_URL)
tar = tarfile.open(download[0])
tar.extractall()
tar.close()
for file in glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB)):
yield file
os.remove(download[0])
def regenerate(filename, modules):
f = open(filename)
try:
content = f.read()
finally:
f.close()
header = content[:content.find('MODULES = {')]
footer = content[content.find("if __name__ == '__main__':"):]
f = open(filename, 'w')
f.write(header)
f.write('MODULES = %s\n\n' % pprint.pformat(modules))
f.write(footer)
f.close()
def run():
print('>> Downloading Function Index')
modules = get_php_functions()
total = sum(len(v) for v in modules.values())
print('%d functions found' % total)
regenerate(__file__, modules)
shutil.rmtree(PHP_MANUAL_DIR)
run()
| mit | 6,263,437,793,173,669,000 | -4,431,683,311,262,582,000 | 31.238711 | 87 | 0.441067 | false |
mcgoddard/widgetr | env/Lib/site-packages/werkzeug/urls.py | 216 | 36710 | # -*- coding: utf-8 -*-
"""
werkzeug.urls
~~~~~~~~~~~~~
``werkzeug.urls`` used to provide several wrapper functions for Python 2
urlparse, whose main purpose were to work around the behavior of the Py2
stdlib and its lack of unicode support. While this was already a somewhat
inconvenient situation, it got even more complicated because Python 3's
``urllib.parse`` actually does handle unicode properly. In other words,
this module would wrap two libraries with completely different behavior. So
now this module contains a 2-and-3-compatible backport of Python 3's
``urllib.parse``, which is mostly API-compatible.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import re
from werkzeug._compat import text_type, PY2, to_unicode, \
to_native, implements_to_string, try_coerce_native, \
normalize_string_tuple, make_literal_wrapper, \
fix_tuple_repr
from werkzeug._internal import _encode_idna, _decode_idna
from werkzeug.datastructures import MultiDict, iter_multi_items
from collections import namedtuple
# A regular expression for what a valid schema looks like
_scheme_re = re.compile(r'^[a-zA-Z0-9+-.]+$')
# Characters that are safe in any part of an URL.
_always_safe = (b'abcdefghijklmnopqrstuvwxyz'
b'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.-+')
_hexdigits = '0123456789ABCDEFabcdef'
_hextobyte = dict(
((a + b).encode(), int(a + b, 16))
for a in _hexdigits for b in _hexdigits
)
_URLTuple = fix_tuple_repr(namedtuple(
'_URLTuple',
['scheme', 'netloc', 'path', 'query', 'fragment']
))
class BaseURL(_URLTuple):
'''Superclass of :py:class:`URL` and :py:class:`BytesURL`.'''
__slots__ = ()
def replace(self, **kwargs):
"""Return an URL with the same values, except for those parameters
given new values by whichever keyword arguments are specified."""
return self._replace(**kwargs)
@property
def host(self):
"""The host part of the URL if available, otherwise `None`. The
host is either the hostname or the IP address mentioned in the
URL. It will not contain the port.
"""
return self._split_host()[0]
@property
def ascii_host(self):
"""Works exactly like :attr:`host` but will return a result that
is restricted to ASCII. If it finds a netloc that is not ASCII
it will attempt to idna decode it. This is useful for socket
operations when the URL might include internationalized characters.
"""
rv = self.host
if rv is not None and isinstance(rv, text_type):
try:
rv = _encode_idna(rv)
except UnicodeError:
rv = rv.encode('ascii', 'ignore')
return to_native(rv, 'ascii', 'ignore')
@property
def port(self):
"""The port in the URL as an integer if it was present, `None`
otherwise. This does not fill in default ports.
"""
try:
rv = int(to_native(self._split_host()[1]))
if 0 <= rv <= 65535:
return rv
except (ValueError, TypeError):
pass
@property
def auth(self):
"""The authentication part in the URL if available, `None`
otherwise.
"""
return self._split_netloc()[0]
@property
def username(self):
"""The username if it was part of the URL, `None` otherwise.
This undergoes URL decoding and will always be a unicode string.
"""
rv = self._split_auth()[0]
if rv is not None:
return _url_unquote_legacy(rv)
@property
def raw_username(self):
"""The username if it was part of the URL, `None` otherwise.
Unlike :attr:`username` this one is not being decoded.
"""
return self._split_auth()[0]
@property
def password(self):
"""The password if it was part of the URL, `None` otherwise.
This undergoes URL decoding and will always be a unicode string.
"""
rv = self._split_auth()[1]
if rv is not None:
return _url_unquote_legacy(rv)
@property
def raw_password(self):
"""The password if it was part of the URL, `None` otherwise.
Unlike :attr:`password` this one is not being decoded.
"""
return self._split_auth()[1]
def decode_query(self, *args, **kwargs):
"""Decodes the query part of the URL. Ths is a shortcut for
calling :func:`url_decode` on the query argument. The arguments and
keyword arguments are forwarded to :func:`url_decode` unchanged.
"""
return url_decode(self.query, *args, **kwargs)
def join(self, *args, **kwargs):
"""Joins this URL with another one. This is just a convenience
function for calling into :meth:`url_join` and then parsing the
return value again.
"""
return url_parse(url_join(self, *args, **kwargs))
def to_url(self):
"""Returns a URL string or bytes depending on the type of the
information stored. This is just a convenience function
for calling :meth:`url_unparse` for this URL.
"""
return url_unparse(self)
def decode_netloc(self):
"""Decodes the netloc part into a string."""
rv = _decode_idna(self.host or '')
if ':' in rv:
rv = '[%s]' % rv
port = self.port
if port is not None:
rv = '%s:%d' % (rv, port)
auth = ':'.join(filter(None, [
_url_unquote_legacy(self.raw_username or '', '/:%@'),
_url_unquote_legacy(self.raw_password or '', '/:%@'),
]))
if auth:
rv = '%s@%s' % (auth, rv)
return rv
def to_uri_tuple(self):
"""Returns a :class:`BytesURL` tuple that holds a URI. This will
encode all the information in the URL properly to ASCII using the
rules a web browser would follow.
It's usually more interesting to directly call :meth:`iri_to_uri` which
will return a string.
"""
return url_parse(iri_to_uri(self).encode('ascii'))
def to_iri_tuple(self):
"""Returns a :class:`URL` tuple that holds a IRI. This will try
to decode as much information as possible in the URL without
losing information similar to how a web browser does it for the
URL bar.
It's usually more interesting to directly call :meth:`uri_to_iri` which
will return a string.
"""
return url_parse(uri_to_iri(self))
def get_file_location(self, pathformat=None):
"""Returns a tuple with the location of the file in the form
``(server, location)``. If the netloc is empty in the URL or
points to localhost, it's represented as ``None``.
The `pathformat` is autodetected by default but needs to be set
when working with URLs of a specific system. The supported values
are ``'windows'`` when working with Windows or DOS paths and
``'posix'`` when working with posix paths.
If the URL does not point to a local file, the server and location
are both represented as ``None``.
:param pathformat: The expected format of the path component.
Currently ``'windows'`` and ``'posix'`` are
supported. Defaults to ``None`` which is
autodetect.
"""
if self.scheme != 'file':
return None, None
path = url_unquote(self.path)
host = self.netloc or None
if pathformat is None:
if os.name == 'nt':
pathformat = 'windows'
else:
pathformat = 'posix'
if pathformat == 'windows':
if path[:1] == '/' and path[1:2].isalpha() and path[2:3] in '|:':
path = path[1:2] + ':' + path[3:]
windows_share = path[:3] in ('\\' * 3, '/' * 3)
import ntpath
path = ntpath.normpath(path)
# Windows shared drives are represented as ``\\host\\directory``.
# That results in a URL like ``file://///host/directory``, and a
# path like ``///host/directory``. We need to special-case this
# because the path contains the hostname.
if windows_share and host is None:
parts = path.lstrip('\\').split('\\', 1)
if len(parts) == 2:
host, path = parts
else:
host = parts[0]
path = ''
elif pathformat == 'posix':
import posixpath
path = posixpath.normpath(path)
else:
raise TypeError('Invalid path format %s' % repr(pathformat))
if host in ('127.0.0.1', '::1', 'localhost'):
host = None
return host, path
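# Illustrative call (editor's sketch; assumes a posix-style path):
#
#     >>> url_parse(u'file:///tmp/report.txt').get_file_location('posix')
#     (None, u'/tmp/report.txt')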
def _split_netloc(self):
if self._at in self.netloc:
return self.netloc.split(self._at, 1)
return None, self.netloc
def _split_auth(self):
auth = self._split_netloc()[0]
if not auth:
return None, None
if self._colon not in auth:
return auth, None
return auth.split(self._colon, 1)
def _split_host(self):
rv = self._split_netloc()[1]
if not rv:
return None, None
if not rv.startswith(self._lbracket):
if self._colon in rv:
return rv.split(self._colon, 1)
return rv, None
idx = rv.find(self._rbracket)
if idx < 0:
return rv, None
host = rv[1:idx]
rest = rv[idx + 1:]
if rest.startswith(self._colon):
return host, rest[1:]
return host, None
@implements_to_string
class URL(BaseURL):
"""Represents a parsed URL. This behaves like a regular tuple but
also has some extra attributes that give further insight into the
URL.
"""
__slots__ = ()
_at = '@'
_colon = ':'
_lbracket = '['
_rbracket = ']'
def __str__(self):
return self.to_url()
def encode_netloc(self):
"""Encodes the netloc part to an ASCII safe URL as bytes."""
rv = self.ascii_host or ''
if ':' in rv:
rv = '[%s]' % rv
port = self.port
if port is not None:
rv = '%s:%d' % (rv, port)
auth = ':'.join(filter(None, [
url_quote(self.raw_username or '', 'utf-8', 'strict', '/:%'),
url_quote(self.raw_password or '', 'utf-8', 'strict', '/:%'),
]))
if auth:
rv = '%s@%s' % (auth, rv)
return to_native(rv)
def encode(self, charset='utf-8', errors='replace'):
"""Encodes the URL to a tuple made out of bytes. The charset is
only being used for the path, query and fragment.
"""
return BytesURL(
self.scheme.encode('ascii'),
self.encode_netloc(),
self.path.encode(charset, errors),
self.query.encode(charset, errors),
self.fragment.encode(charset, errors)
)
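# Illustrative attribute access on a parsed URL (editor's sketch; uses
# url_parse() which is defined further below; reprs shown in Python 2
# style):
#
#     >>> u = url_parse(u'http://user:[email protected]:8080/p?q=1#f')
#     >>> u.host, u.port, u.username, u.password
#     (u'example.com', 8080, u'user', u'pw')
#     >>> u.replace(scheme=u'https').to_url()
#     u'https://user:[email protected]:8080/p?q=1#f'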
class BytesURL(BaseURL):
"""Represents a parsed URL in bytes."""
__slots__ = ()
_at = b'@'
_colon = b':'
_lbracket = b'['
_rbracket = b']'
def __str__(self):
return self.to_url().decode('utf-8', 'replace')
def encode_netloc(self):
"""Returns the netloc unchanged as bytes."""
return self.netloc
def decode(self, charset='utf-8', errors='replace'):
"""Decodes the URL to a tuple made out of strings. The charset is
only being used for the path, query and fragment.
"""
return URL(
self.scheme.decode('ascii'),
self.decode_netloc(),
self.path.decode(charset, errors),
self.query.decode(charset, errors),
self.fragment.decode(charset, errors)
)
def _unquote_to_bytes(string, unsafe=''):
if isinstance(string, text_type):
string = string.encode('utf-8')
if isinstance(unsafe, text_type):
unsafe = unsafe.encode('utf-8')
unsafe = frozenset(bytearray(unsafe))
bits = iter(string.split(b'%'))
result = bytearray(next(bits, b''))
for item in bits:
try:
char = _hextobyte[item[:2]]
if char in unsafe:
raise KeyError()
result.append(char)
result.extend(item[2:])
except KeyError:
result.extend(b'%')
result.extend(item)
return bytes(result)
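# For example (editor's sketch, Python 3 repr): percent escapes are decoded
# to raw bytes, except for those listed as unsafe, which are left untouched:
#
#     >>> _unquote_to_bytes('a%20b%2Fc', unsafe='/')
#     b'a b%2Fc'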
def _url_encode_impl(obj, charset, encode_keys, sort, key):
iterable = iter_multi_items(obj)
if sort:
iterable = sorted(iterable, key=key)
for key, value in iterable:
if value is None:
continue
if not isinstance(key, bytes):
key = text_type(key).encode(charset)
if not isinstance(value, bytes):
value = text_type(value).encode(charset)
yield url_quote_plus(key) + '=' + url_quote_plus(value)
def _url_unquote_legacy(value, unsafe=''):
try:
return url_unquote(value, charset='utf-8',
errors='strict', unsafe=unsafe)
except UnicodeError:
return url_unquote(value, charset='latin1', unsafe=unsafe)
def url_parse(url, scheme=None, allow_fragments=True):
"""Parses a URL from a string into a :class:`URL` tuple. If the URL
is lacking a scheme it can be provided as second argument. Otherwise,
it is ignored. Optionally fragments can be stripped from the URL
by setting `allow_fragments` to `False`.
The inverse of this function is :func:`url_unparse`.
:param url: the URL to parse.
:param scheme: the default schema to use if the URL is schemaless.
:param allow_fragments: if set to `False` a fragment will be removed
from the URL.
"""
s = make_literal_wrapper(url)
is_text_based = isinstance(url, text_type)
if scheme is None:
scheme = s('')
netloc = query = fragment = s('')
i = url.find(s(':'))
if i > 0 and _scheme_re.match(to_native(url[:i], errors='replace')):
# make sure "iri" is not actually a port number (in which case
# "scheme" is really part of the path)
rest = url[i + 1:]
if not rest or any(c not in s('0123456789') for c in rest):
# not a port number
scheme, url = url[:i].lower(), rest
if url[:2] == s('//'):
delim = len(url)
for c in s('/?#'):
wdelim = url.find(c, 2)
if wdelim >= 0:
delim = min(delim, wdelim)
netloc, url = url[2:delim], url[delim:]
if (s('[') in netloc and s(']') not in netloc) or \
(s(']') in netloc and s('[') not in netloc):
raise ValueError('Invalid IPv6 URL')
if allow_fragments and s('#') in url:
url, fragment = url.split(s('#'), 1)
if s('?') in url:
url, query = url.split(s('?'), 1)
result_type = is_text_based and URL or BytesURL
return result_type(scheme, netloc, url, query, fragment)
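# Illustrative parse (editor's sketch):
#
#     >>> u = url_parse(u'http://example.com/a?b=1#c')
#     >>> (u.scheme, u.netloc, u.path, u.query, u.fragment)
#     (u'http', u'example.com', u'/a', u'b=1', u'c')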
def url_quote(string, charset='utf-8', errors='strict', safe='/:', unsafe=''):
"""URL encode a single string with a given encoding.
:param s: the string to quote.
:param charset: the charset to be used.
:param safe: an optional sequence of safe characters.
:param unsafe: an optional sequence of unsafe characters.
.. versionadded:: 0.9.2
The `unsafe` parameter was added.
"""
if not isinstance(string, (text_type, bytes, bytearray)):
string = text_type(string)
if isinstance(string, text_type):
string = string.encode(charset, errors)
if isinstance(safe, text_type):
safe = safe.encode(charset, errors)
if isinstance(unsafe, text_type):
unsafe = unsafe.encode(charset, errors)
safe = frozenset(bytearray(safe) + _always_safe) - frozenset(bytearray(unsafe))
rv = bytearray()
for char in bytearray(string):
if char in safe:
rv.append(char)
else:
rv.extend(('%%%02X' % char).encode('ascii'))
return to_native(bytes(rv))
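# For example (editor's sketch; note that '/' and ':' are safe by default):
#
#     >>> url_quote(u'hello world/index.html')
#     'hello%20world/index.html'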
def url_quote_plus(string, charset='utf-8', errors='strict', safe=''):
"""URL encode a single string with the given encoding and convert
whitespace to "+".
:param s: The string to quote.
:param charset: The charset to be used.
:param safe: An optional sequence of safe characters.
"""
return url_quote(string, charset, errors, safe + ' ', '+').replace(' ', '+')
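# For example (editor's sketch):
#
#     >>> url_quote_plus(u'a b&c')
#     'a+b%26c'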
def url_unparse(components):
"""The reverse operation to :meth:`url_parse`. This accepts arbitrary
as well as :class:`URL` tuples and returns a URL as a string.
:param components: the parsed URL as tuple which should be converted
into a URL string.
"""
scheme, netloc, path, query, fragment = \
normalize_string_tuple(components)
s = make_literal_wrapper(scheme)
url = s('')
# We generally treat file:///x and file:/x the same which is also
# what browsers seem to do. This also saves us from needing a scheme
# registry for netloc handling and from having to differentiate between
# an empty and a missing netloc.
if netloc or (scheme and path.startswith(s('/'))):
if path and path[:1] != s('/'):
path = s('/') + path
url = s('//') + (netloc or s('')) + path
elif path:
url += path
if scheme:
url = scheme + s(':') + url
if query:
url = url + s('?') + query
if fragment:
url = url + s('#') + fragment
return url
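# For example (editor's sketch):
#
#     >>> url_unparse((u'http', u'example.com', u'/a', u'b=1', u'c'))
#     u'http://example.com/a?b=1#c'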
def url_unquote(string, charset='utf-8', errors='replace', unsafe=''):
"""URL decode a single string with a given encoding. If the charset
is set to `None` no unicode decoding is performed and raw bytes
are returned.
:param s: the string to unquote.
:param charset: the charset of the query string. If set to `None`
no unicode decoding will take place.
:param errors: the error handling for the charset decoding.
"""
rv = _unquote_to_bytes(string, unsafe)
if charset is not None:
rv = rv.decode(charset, errors)
return rv
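# For example (editor's sketch; %C3%BC is the UTF-8 encoding of u'\xfc'):
#
#     >>> url_unquote('f%C3%BCr')
#     u'f\xfcr'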
def url_unquote_plus(s, charset='utf-8', errors='replace'):
"""URL decode a single string with the given `charset` and decode "+" to
whitespace.
Per default encoding errors are replaced. If you want a different behavior
you can set `errors` to ``'ignore'`` or ``'strict'``. In strict mode a
:exc:`UnicodeError` is raised.
:param s: The string to unquote.
:param charset: the charset of the query string. If set to `None`
no unicode decoding will take place.
:param errors: The error handling for the `charset` decoding.
"""
if isinstance(s, text_type):
s = s.replace(u'+', u' ')
else:
s = s.replace(b'+', b' ')
return url_unquote(s, charset, errors)
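# For example (editor's sketch):
#
#     >>> url_unquote_plus('hello+world%21')
#     u'hello world!'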
def url_fix(s, charset='utf-8'):
r"""Sometimes you get an URL by a user that just isn't a real URL because
it contains unsafe characters like ' ' and so on. This function can fix
some of the problems in a similar way browsers handle data entered by the
user:
>>> url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)')
'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)'
:param s: the string with the URL to fix.
:param charset: The target charset for the URL if the url was given as
unicode string.
"""
# First step is to switch to unicode processing and to convert
# backslashes (which are invalid in URLs anyways) to slashes. This is
# consistent with what Chrome does.
s = to_unicode(s, charset, 'replace').replace('\\', '/')
# For the specific case that we look like a malformed windows URL
# we want to fix this up manually:
if s.startswith('file://') and s[7:8].isalpha() and s[8:10] in (':/', '|/'):
s = 'file:///' + s[7:]
url = url_parse(s)
path = url_quote(url.path, charset, safe='/%+$!*\'(),')
qs = url_quote_plus(url.query, charset, safe=':&%=+$!*\'(),')
anchor = url_quote_plus(url.fragment, charset, safe=':&%=+$!*\'(),')
return to_native(url_unparse((url.scheme, url.encode_netloc(),
path, qs, anchor)))
def uri_to_iri(uri, charset='utf-8', errors='replace'):
r"""
Converts a URI in a given charset to a IRI.
Examples for URI versus IRI:
>>> uri_to_iri(b'http://xn--n3h.net/')
u'http://\u2603.net/'
>>> uri_to_iri(b'http://%C3%BCser:p%C3%[email protected]/p%C3%A5th')
u'http://\xfcser:p\xe4ssword@\u2603.net/p\xe5th'
Query strings are left unchanged:
>>> uri_to_iri('/?foo=24&x=%26%2f')
u'/?foo=24&x=%26%2f'
.. versionadded:: 0.6
:param uri: The URI to convert.
:param charset: The charset of the URI.
:param errors: The error handling on decode.
"""
if isinstance(uri, tuple):
uri = url_unparse(uri)
uri = url_parse(to_unicode(uri, charset))
path = url_unquote(uri.path, charset, errors, '%/;?')
query = url_unquote(uri.query, charset, errors, '%;/?:@&=+,$#')
fragment = url_unquote(uri.fragment, charset, errors, '%;/?:@&=+,$#')
return url_unparse((uri.scheme, uri.decode_netloc(),
path, query, fragment))
def iri_to_uri(iri, charset='utf-8', errors='strict', safe_conversion=False):
r"""
Converts any unicode based IRI to an acceptable ASCII URI. Werkzeug always
uses utf-8 URLs internally because this is what browsers and HTTP do as
well. In some places where it accepts a URL it also accepts a unicode IRI
and converts it into a URI.
Examples for IRI versus URI:
>>> iri_to_uri(u'http://☃.net/')
'http://xn--n3h.net/'
>>> iri_to_uri(u'http://üser:pässword@☃.net/påth')
'http://%C3%BCser:p%C3%[email protected]/p%C3%A5th'
There is a general problem with IRI and URI conversion with some
protocols that appear in the wild that are in violation of the URI
specification. In places where Werkzeug goes through a forced IRI to
URI conversion it will set the `safe_conversion` flag which will
not perform a conversion if the end result is already ASCII. This
can mean that the return value is not an entirely correct URI but
it will not destroy such invalid URLs in the process.
As an example consider the following two IRIs::
magnet:?xt=uri:whatever
itms-services://?action=download-manifest
The internal representation after parsing of those URLs is the same
and there is no way to reconstruct the original one. If safe
conversion is enabled however this function becomes a noop for both of
those strings as they both can be considered URIs.
.. versionadded:: 0.6
.. versionchanged:: 0.9.6
The `safe_conversion` parameter was added.
:param iri: The IRI to convert.
:param charset: The charset for the URI.
:param safe_conversion: indicates if a safe conversion should take place.
For more information see the explanation above.
"""
if isinstance(iri, tuple):
iri = url_unparse(iri)
if safe_conversion:
try:
native_iri = to_native(iri)
ascii_iri = to_native(iri).encode('ascii')
if ascii_iri.split() == [ascii_iri]:
return native_iri
except UnicodeError:
pass
iri = url_parse(to_unicode(iri, charset, errors))
netloc = iri.encode_netloc()
path = url_quote(iri.path, charset, errors, '/:~+%')
query = url_quote(iri.query, charset, errors, '%&[]:;$*()+,!?*/=')
fragment = url_quote(iri.fragment, charset, errors, '=%&[]:;$()+,!?*/')
return to_native(url_unparse((iri.scheme, netloc,
path, query, fragment)))
def url_decode(s, charset='utf-8', decode_keys=False, include_empty=True,
errors='replace', separator='&', cls=None):
"""
Parse a querystring and return it as :class:`MultiDict`. There is a
difference in key decoding on different Python versions. On Python 3
keys will always be fully decoded whereas on Python 2, keys will
remain bytestrings if they fit into ASCII. On 2.x keys can be forced
to be unicode by setting `decode_keys` to `True`.
If the charset is set to `None` no unicode decoding will happen and
raw bytes will be returned.
Per default a key with a missing value will default to an empty string. If
you don't want that behavior you can set `include_empty` to `False`.
Per default encoding errors are replaced. If you want a different behavior
you can set `errors` to ``'ignore'`` or ``'strict'``. In strict mode a
`UnicodeError` is raised.
.. versionchanged:: 0.5
In previous versions ";" and "&" could be used for url decoding.
This changed in 0.5 where only "&" is supported. If you want to
use ";" instead a different `separator` can be provided.
The `cls` parameter was added.
:param s: a string with the query string to decode.
:param charset: the charset of the query string. If set to `None`
no unicode decoding will take place.
:param decode_keys: Used on Python 2.x to control whether keys should
be forced to be unicode objects. If set to `True`
then keys will be unicode in all cases. Otherwise,
they remain `str` if they fit into ASCII.
:param include_empty: Set to `False` if you don't want empty values to
appear in the dict.
:param errors: the decoding error behavior.
:param separator: the pair separator to be used, defaults to ``&``
:param cls: an optional dict class to use. If this is not specified
or `None` the default :class:`MultiDict` is used.
"""
if cls is None:
cls = MultiDict
if isinstance(s, text_type) and not isinstance(separator, text_type):
separator = separator.decode(charset or 'ascii')
elif isinstance(s, bytes) and not isinstance(separator, bytes):
separator = separator.encode(charset or 'ascii')
return cls(_url_decode_impl(s.split(separator), charset, decode_keys,
include_empty, errors))
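# Illustrative decode into a MultiDict (editor's sketch):
#
#     >>> d = url_decode('a=1&a=2&b=3')
#     >>> d.getlist('a')
#     [u'1', u'2']
#     >>> d['b']
#     u'3'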
def url_decode_stream(stream, charset='utf-8', decode_keys=False,
include_empty=True, errors='replace', separator='&',
cls=None, limit=None, return_iterator=False):
"""Works like :func:`url_decode` but decodes a stream. The behavior
of stream and limit follows functions like
:func:`~werkzeug.wsgi.make_line_iter`. The generator of pairs is
directly fed to the `cls` so you can consume the data while it's
parsed.
.. versionadded:: 0.8
:param stream: a stream with the encoded querystring
:param charset: the charset of the query string. If set to `None`
no unicode decoding will take place.
:param decode_keys: Used on Python 2.x to control whether keys should
be forced to be unicode objects. If set to `True`,
keys will be unicode in all cases. Otherwise, they
remain `str` if they fit into ASCII.
:param include_empty: Set to `False` if you don't want empty values to
appear in the dict.
:param errors: the decoding error behavior.
:param separator: the pair separator to be used, defaults to ``&``
:param cls: an optional dict class to use. If this is not specified
or `None` the default :class:`MultiDict` is used.
:param limit: the content length of the URL data. Not necessary if
a limited stream is provided.
:param return_iterator: if set to `True` the `cls` argument is ignored
and an iterator over all decoded pairs is
returned
"""
from werkzeug.wsgi import make_chunk_iter
if return_iterator:
cls = lambda x: x
elif cls is None:
cls = MultiDict
pair_iter = make_chunk_iter(stream, separator, limit)
return cls(_url_decode_impl(pair_iter, charset, decode_keys,
include_empty, errors))
def _url_decode_impl(pair_iter, charset, decode_keys, include_empty, errors):
for pair in pair_iter:
if not pair:
continue
s = make_literal_wrapper(pair)
equal = s('=')
if equal in pair:
key, value = pair.split(equal, 1)
else:
if not include_empty:
continue
key = pair
value = s('')
key = url_unquote_plus(key, charset, errors)
if charset is not None and PY2 and not decode_keys:
key = try_coerce_native(key)
yield key, url_unquote_plus(value, charset, errors)
def url_encode(obj, charset='utf-8', encode_keys=False, sort=False, key=None,
separator=b'&'):
"""URL encode a dict/`MultiDict`. If a value is `None` it will not appear
    in the result string. By default only values are encoded into the
    target charset. If `encode_keys` is set to ``True``, unicode keys are
    supported too.
If `sort` is set to `True` the items are sorted by `key` or the default
sorting algorithm.
.. versionadded:: 0.5
`sort`, `key`, and `separator` were added.
:param obj: the object to encode into a query string.
:param charset: the charset of the query string.
:param encode_keys: set to `True` if you have unicode keys. (Ignored on
Python 3.x)
:param sort: set to `True` if you want parameters to be sorted by `key`.
:param separator: the separator to be used for the pairs.
:param key: an optional function to be used for sorting. For more details
check out the :func:`sorted` documentation.
"""
separator = to_native(separator, 'ascii')
return separator.join(_url_encode_impl(obj, charset, encode_keys, sort, key))
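# Hedged usage sketch (values invented): `None` values are dropped and
# `sort=True` makes the output order deterministic.
def _example_url_encode():  # illustrative helper, never called by the library
    qs = url_encode({'b': 2, 'a': 1, 'skip': None}, sort=True)
    assert qs == 'a=1&b=2'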
def url_encode_stream(obj, stream=None, charset='utf-8', encode_keys=False,
sort=False, key=None, separator=b'&'):
"""Like :meth:`url_encode` but writes the results to a stream
object. If the stream is `None` a generator over all encoded
pairs is returned.
.. versionadded:: 0.8
:param obj: the object to encode into a query string.
:param stream: a stream to write the encoded object into or `None` if
an iterator over the encoded pairs should be returned. In
that case the separator argument is ignored.
:param charset: the charset of the query string.
:param encode_keys: set to `True` if you have unicode keys. (Ignored on
Python 3.x)
:param sort: set to `True` if you want parameters to be sorted by `key`.
:param separator: the separator to be used for the pairs.
:param key: an optional function to be used for sorting. For more details
check out the :func:`sorted` documentation.
"""
separator = to_native(separator, 'ascii')
gen = _url_encode_impl(obj, charset, encode_keys, sort, key)
if stream is None:
return gen
for idx, chunk in enumerate(gen):
if idx:
stream.write(separator)
stream.write(chunk)
def url_join(base, url, allow_fragments=True):
"""Join a base URL and a possibly relative URL to form an absolute
interpretation of the latter.
:param base: the base URL for the join operation.
:param url: the URL to join.
:param allow_fragments: indicates whether fragments should be allowed.
"""
if isinstance(base, tuple):
base = url_unparse(base)
if isinstance(url, tuple):
url = url_unparse(url)
base, url = normalize_string_tuple((base, url))
s = make_literal_wrapper(base)
if not base:
return url
if not url:
return base
bscheme, bnetloc, bpath, bquery, bfragment = \
url_parse(base, allow_fragments=allow_fragments)
scheme, netloc, path, query, fragment = \
url_parse(url, bscheme, allow_fragments)
if scheme != bscheme:
return url
if netloc:
return url_unparse((scheme, netloc, path, query, fragment))
netloc = bnetloc
if path[:1] == s('/'):
segments = path.split(s('/'))
elif not path:
segments = bpath.split(s('/'))
if not query:
query = bquery
else:
segments = bpath.split(s('/'))[:-1] + path.split(s('/'))
# If the rightmost part is "./" we want to keep the slash but
# remove the dot.
if segments[-1] == s('.'):
segments[-1] = s('')
# Resolve ".." and "."
segments = [segment for segment in segments if segment != s('.')]
while 1:
i = 1
n = len(segments) - 1
while i < n:
if segments[i] == s('..') and \
segments[i - 1] not in (s(''), s('..')):
del segments[i - 1:i + 1]
break
i += 1
else:
break
# Remove trailing ".." if the URL is absolute
unwanted_marker = [s(''), s('..')]
while segments[:2] == unwanted_marker:
del segments[1]
path = s('/').join(segments)
return url_unparse((scheme, netloc, path, query, fragment))
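# Hedged usage sketch (URLs invented): relative references are resolved
# against the base much like a browser would do it.
def _example_url_join():  # illustrative helper, never called by the library
    assert url_join('http://example.com/a/b/c', '../d') == 'http://example.com/a/d'
    assert url_join('http://example.com/a/b/', 'c') == 'http://example.com/a/b/c'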
class Href(object):
"""Implements a callable that constructs URLs with the given base. The
function can be called with any number of positional and keyword
    arguments which are then used to assemble the URL. Works with URLs
and posix paths.
Positional arguments are appended as individual segments to
the path of the URL:
>>> href = Href('/foo')
>>> href('bar', 23)
'/foo/bar/23'
>>> href('foo', bar=23)
'/foo/foo?bar=23'
If any of the arguments (positional or keyword) evaluates to `None` it
will be skipped. If no keyword arguments are given the last argument
can be a :class:`dict` or :class:`MultiDict` (or any other dict subclass),
otherwise the keyword arguments are used for the query parameters, cutting
off the first trailing underscore of the parameter name:
>>> href(is_=42)
'/foo?is=42'
>>> href({'foo': 'bar'})
'/foo?foo=bar'
Combining of both methods is not allowed:
>>> href({'foo': 'bar'}, bar=42)
Traceback (most recent call last):
...
TypeError: keyword arguments and query-dicts can't be combined
Accessing attributes on the href object creates a new href object with
the attribute name as prefix:
>>> bar_href = href.bar
>>> bar_href("blub")
'/foo/bar/blub'
If `sort` is set to `True` the items are sorted by `key` or the default
sorting algorithm:
>>> href = Href("/", sort=True)
>>> href(a=1, b=2, c=3)
'/?a=1&b=2&c=3'
.. versionadded:: 0.5
`sort` and `key` were added.
"""
def __init__(self, base='./', charset='utf-8', sort=False, key=None):
if not base:
base = './'
self.base = base
self.charset = charset
self.sort = sort
self.key = key
def __getattr__(self, name):
if name[:2] == '__':
raise AttributeError(name)
base = self.base
if base[-1:] != '/':
base += '/'
return Href(url_join(base, name), self.charset, self.sort, self.key)
def __call__(self, *path, **query):
if path and isinstance(path[-1], dict):
if query:
raise TypeError('keyword arguments and query-dicts '
'can\'t be combined')
query, path = path[-1], path[:-1]
elif query:
query = dict([(k.endswith('_') and k[:-1] or k, v)
for k, v in query.items()])
path = '/'.join([to_unicode(url_quote(x, self.charset), 'ascii')
for x in path if x is not None]).lstrip('/')
rv = self.base
if path:
if not rv.endswith('/'):
rv += '/'
rv = url_join(rv, './' + path)
if query:
rv += '?' + to_unicode(url_encode(query, self.charset, sort=self.sort,
key=self.key), 'ascii')
return to_native(rv)
| mit | -6,287,260,683,755,239,000 | -2,170,534,332,217,179,400 | 35.556773 | 83 | 0.591042 | false |
Thor77/youtube-dl | youtube_dl/extractor/esri.py | 35 | 2627 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_urlparse
from ..utils import (
int_or_none,
parse_filesize,
unified_strdate,
)
class EsriVideoIE(InfoExtractor):
_VALID_URL = r'https?://video\.esri\.com/watch/(?P<id>[0-9]+)'
_TEST = {
'url': 'https://video.esri.com/watch/1124/arcgis-online-_dash_-developing-applications',
'md5': 'd4aaf1408b221f1b38227a9bbaeb95bc',
'info_dict': {
'id': '1124',
'ext': 'mp4',
'title': 'ArcGIS Online - Developing Applications',
'description': 'Jeremy Bartley demonstrates how to develop applications with ArcGIS Online.',
            'thumbnail': r're:^https?://.*\.jpg$',
'duration': 185,
'upload_date': '20120419',
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
formats = []
for width, height, content in re.findall(
r'(?s)<li><strong>(\d+)x(\d+):</strong>(.+?)</li>', webpage):
for video_url, ext, filesize in re.findall(
r'<a[^>]+href="([^"]+)">([^<]+) \(([^<]+)\)</a>', content):
formats.append({
'url': compat_urlparse.urljoin(url, video_url),
'ext': ext.lower(),
'format_id': '%s-%s' % (ext.lower(), height),
'width': int(width),
'height': int(height),
'filesize_approx': parse_filesize(filesize),
})
self._sort_formats(formats)
title = self._html_search_meta('title', webpage, 'title')
description = self._html_search_meta(
'description', webpage, 'description', fatal=False)
thumbnail = self._html_search_meta('thumbnail', webpage, 'thumbnail', fatal=False)
if thumbnail:
thumbnail = re.sub(r'_[st]\.jpg$', '_x.jpg', thumbnail)
duration = int_or_none(self._search_regex(
[r'var\s+videoSeconds\s*=\s*(\d+)', r"'duration'\s*:\s*(\d+)"],
webpage, 'duration', fatal=False))
upload_date = unified_strdate(self._html_search_meta(
'last-modified', webpage, 'upload date', fatal=False))
return {
'id': video_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'duration': duration,
'upload_date': upload_date,
'formats': formats
}
| unlicense | -9,167,822,977,014,653,000 | -3,772,570,249,136,655,000 | 34.5 | 105 | 0.523791 | false |
blink1073/scikit-image | skimage/segmentation/tests/test_felzenszwalb.py | 27 | 2021 | import numpy as np
from numpy.testing import assert_equal, assert_array_equal
from skimage._shared.testing import assert_greater, test_parallel
from skimage.segmentation import felzenszwalb
from skimage import data
@test_parallel()
def test_grey():
# very weak tests. This algorithm is pretty unstable.
img = np.zeros((20, 21))
img[:10, 10:] = 0.2
img[10:, :10] = 0.4
img[10:, 10:] = 0.6
seg = felzenszwalb(img, sigma=0)
# we expect 4 segments:
assert_equal(len(np.unique(seg)), 4)
# that mostly respect the 4 regions:
for i in range(4):
hist = np.histogram(img[seg == i], bins=[0, 0.1, 0.3, 0.5, 1])[0]
assert_greater(hist[i], 40)
def test_minsize():
# single-channel:
    img = data.coins()[20:168, 0:128]
for min_size in np.arange(10, 100, 10):
segments = felzenszwalb(img, min_size=min_size, sigma=3)
counts = np.bincount(segments.ravel())
# actually want to test greater or equal.
assert_greater(counts.min() + 1, min_size)
# multi-channel:
coffee = data.coffee()[::4, ::4]
for min_size in np.arange(10, 100, 10):
segments = felzenszwalb(coffee, min_size=min_size, sigma=3)
counts = np.bincount(segments.ravel())
# actually want to test greater or equal.
# the construction doesn't guarantee min_size is respected
# after intersecting the sementations for the colors
assert_greater(np.mean(counts) + 1, min_size)
def test_color():
# very weak tests. This algorithm is pretty unstable.
img = np.zeros((20, 21, 3))
img[:10, :10, 0] = 1
img[10:, :10, 1] = 1
img[10:, 10:, 2] = 1
seg = felzenszwalb(img, sigma=0)
# we expect 4 segments:
assert_equal(len(np.unique(seg)), 4)
assert_array_equal(seg[:10, :10], 0)
assert_array_equal(seg[10:, :10], 2)
assert_array_equal(seg[:10, 10:], 1)
assert_array_equal(seg[10:, 10:], 3)
if __name__ == '__main__':
from numpy import testing
testing.run_module_suite()
| bsd-3-clause | -7,808,580,361,680,550,000 | 2,108,724,043,176,737,300 | 33.254237 | 73 | 0.622959 | false |
thomasquintana/jobber | jobber/core/actor/mailbox.py | 1 | 1271 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Thomas Quintana <[email protected]>
from collections import deque
class Mailbox(object):
"""
Implements the Mailbox
Ensures FIFO
"""
def __init__(self):
self._box = deque()
def append(self, message):
self._box.append(message)
def first(self):
return self._box[0]
def pop(self):
return self._box.popleft()
def flush(self):
self._box.clear()
def __len__(self):
return len(self._box)
| apache-2.0 | -802,551,568,256,218,200 | -2,766,712,536,307,363,300 | 28.55814 | 62 | 0.697876 | false |
Hoekz/hackness-monster | venv/lib/python2.7/site-packages/pip/vcs/bazaar.py | 514 | 3803 | from __future__ import absolute_import
import logging
import os
import tempfile
# TODO: Get this into six.moves.urllib.parse
try:
from urllib import parse as urllib_parse
except ImportError:
import urlparse as urllib_parse
from pip.utils import rmtree, display_path
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url
logger = logging.getLogger(__name__)
class Bazaar(VersionControl):
name = 'bzr'
dirname = '.bzr'
repo_name = 'branch'
schemes = (
'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
'bzr+lp',
)
def __init__(self, url=None, *args, **kwargs):
super(Bazaar, self).__init__(url, *args, **kwargs)
# Python >= 2.7.4, 3.3 doesn't have uses_fragment or non_hierarchical
# Register lp but do not expose as a scheme to support bzr+lp.
if getattr(urllib_parse, 'uses_fragment', None):
urllib_parse.uses_fragment.extend(['lp'])
urllib_parse.non_hierarchical.extend(['lp'])
def export(self, location):
"""
Export the Bazaar repository at the url to the destination location
"""
temp_dir = tempfile.mkdtemp('-export', 'pip-')
self.unpack(temp_dir)
if os.path.exists(location):
# Remove the location to make sure Bazaar can export it correctly
rmtree(location)
try:
self.run_command(['export', location], cwd=temp_dir,
show_stdout=False)
finally:
rmtree(temp_dir)
def switch(self, dest, url, rev_options):
self.run_command(['switch', url], cwd=dest)
def update(self, dest, rev_options):
self.run_command(['pull', '-q'] + rev_options, cwd=dest)
def obtain(self, dest):
url, rev = self.get_url_rev()
if rev:
rev_options = ['-r', rev]
rev_display = ' (to revision %s)' % rev
else:
rev_options = []
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
logger.info(
'Checking out %s%s to %s',
url,
rev_display,
display_path(dest),
)
self.run_command(['branch', '-q'] + rev_options + [url, dest])
def get_url_rev(self):
        # hotfix the URL scheme after removing bzr+ from bzr+ssh://; re-add it
url, rev = super(Bazaar, self).get_url_rev()
if url.startswith('ssh://'):
url = 'bzr+' + url
return url, rev
def get_url(self, location):
urls = self.run_command(['info'], show_stdout=False, cwd=location)
for line in urls.splitlines():
line = line.strip()
for x in ('checkout of branch: ',
'parent branch: '):
if line.startswith(x):
repo = line.split(x)[1]
if self._is_local_repository(repo):
return path_to_url(repo)
return repo
return None
def get_revision(self, location):
revision = self.run_command(
['revno'], show_stdout=False, cwd=location)
return revision.splitlines()[-1]
def get_src_requirement(self, dist, location):
repo = self.get_url(location)
if not repo:
return None
if not repo.lower().startswith('bzr:'):
repo = 'bzr+' + repo
egg_project_name = dist.egg_name().split('-', 1)[0]
current_rev = self.get_revision(location)
return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
def check_version(self, dest, rev_options):
"""Always assume the versions don't match"""
return False
vcs.register(Bazaar)
| mit | -74,895,913,277,017,060 | -7,189,104,535,305,789,000 | 31.784483 | 77 | 0.55614 | false |
russelmahmud/mess-account | django/contrib/sessions/backends/db.py | 232 | 2756 | import datetime
from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.exceptions import SuspiciousOperation
from django.db import IntegrityError, transaction, router
from django.utils.encoding import force_unicode
class SessionStore(SessionBase):
"""
Implements database session store.
"""
def __init__(self, session_key=None):
super(SessionStore, self).__init__(session_key)
def load(self):
try:
s = Session.objects.get(
session_key = self.session_key,
expire_date__gt=datetime.datetime.now()
)
return self.decode(force_unicode(s.session_data))
except (Session.DoesNotExist, SuspiciousOperation):
self.create()
return {}
def exists(self, session_key):
try:
Session.objects.get(session_key=session_key)
except Session.DoesNotExist:
return False
return True
def create(self):
while True:
self.session_key = self._get_new_session_key()
try:
# Save immediately to ensure we have a unique entry in the
# database.
self.save(must_create=True)
except CreateError:
# Key wasn't unique. Try again.
continue
self.modified = True
self._session_cache = {}
return
def save(self, must_create=False):
"""
Saves the current session data to the database. If 'must_create' is
True, a database error will be raised if the saving operation doesn't
create a *new* entry (as opposed to possibly updating an existing
entry).
"""
obj = Session(
session_key = self.session_key,
session_data = self.encode(self._get_session(no_load=must_create)),
expire_date = self.get_expiry_date()
)
using = router.db_for_write(Session, instance=obj)
sid = transaction.savepoint(using=using)
try:
obj.save(force_insert=must_create, using=using)
except IntegrityError:
if must_create:
transaction.savepoint_rollback(sid, using=using)
raise CreateError
raise
def delete(self, session_key=None):
if session_key is None:
if self._session_key is None:
return
session_key = self._session_key
try:
Session.objects.get(session_key=session_key).delete()
except Session.DoesNotExist:
pass
# At bottom to avoid circular import
from django.contrib.sessions.models import Session
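# Hedged usage sketch (key/value invented; assumes a configured database):
# create() loops until save(must_create=True) wins a unique session_key, so
# concurrent creators cannot silently overwrite each other.
def _example_session_store():  # illustrative helper, not part of Django's API
    store = SessionStore()
    store['cart'] = [1, 2, 3]  # lazy load triggers create() for a new key
    store.save()
    assert SessionStore(store.session_key)['cart'] == [1, 2, 3]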
| bsd-3-clause | 8,839,586,994,234,592,000 | -8,559,229,335,912,300,000 | 32.609756 | 79 | 0.593614 | false |
huggingface/pytorch-transformers | src/transformers/modeling_flax_pytorch_utils.py | 1 | 9873 | # coding=utf-8
# Copyright 2021 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch - Flax general utilities."""
import os
from pickle import UnpicklingError
import numpy as np
import jax.numpy as jnp
import transformers
from flax.serialization import from_bytes
from flax.traverse_util import flatten_dict, unflatten_dict
from .utils import logging
logger = logging.get_logger(__name__)
#####################
# PyTorch => Flax #
#####################
def load_pytorch_checkpoint_in_flax_state_dict(flax_model, pytorch_checkpoint_path, allow_missing_keys=False):
"""Load pytorch checkpoints in a flax model"""
try:
import torch # noqa: F401
except ImportError:
logger.error(
"Loading a PyTorch model in Flax, requires both PyTorch and Flax to be installed. Please see "
"https://pytorch.org/ and https://flax.readthedocs.io/en/latest/installation.html for installation instructions."
)
raise
pt_path = os.path.abspath(pytorch_checkpoint_path)
logger.info(f"Loading PyTorch weights from {pt_path}")
pt_state_dict = torch.load(pt_path, map_location="cpu")
logger.info(f"PyTorch checkpoint contains {sum(t.numel() for t in pt_state_dict.values()):,} parameters.")
flax_state_dict = convert_pytorch_state_dict_to_flax(pt_state_dict, flax_model)
return flax_state_dict
def convert_pytorch_state_dict_to_flax(pt_state_dict, flax_model):
# convert pytorch tensor to numpy
pt_state_dict = {k: v.numpy() for k, v in pt_state_dict.items()}
random_flax_state_dict = flatten_dict(flax_model.params)
flax_state_dict = {}
remove_base_model_prefix = (flax_model.base_model_prefix not in flax_model.params) and (
flax_model.base_model_prefix in set([k.split(".")[0] for k in pt_state_dict.keys()])
)
add_base_model_prefix = (flax_model.base_model_prefix in flax_model.params) and (
flax_model.base_model_prefix not in set([k.split(".")[0] for k in pt_state_dict.keys()])
)
# Need to change some parameters name to match Flax names so that we don't have to fork any layer
for pt_key, pt_tensor in pt_state_dict.items():
pt_tuple_key = tuple(pt_key.split("."))
has_base_model_prefix = pt_tuple_key[0] == flax_model.base_model_prefix
require_base_model_prefix = (flax_model.base_model_prefix,) + pt_tuple_key in random_flax_state_dict
if remove_base_model_prefix and has_base_model_prefix:
pt_tuple_key = pt_tuple_key[1:]
elif add_base_model_prefix and require_base_model_prefix:
pt_tuple_key = (flax_model.base_model_prefix,) + pt_tuple_key
# Correctly rename weight parameters
if pt_tuple_key[-1] in ["weight", "gamma"] and pt_tuple_key[:-1] + ("scale",) in random_flax_state_dict:
pt_tuple_key = pt_tuple_key[:-1] + ("scale",)
if pt_tuple_key[-1] == "weight" and pt_tuple_key[:-1] + ("embedding",) in random_flax_state_dict:
pt_tuple_key = pt_tuple_key[:-1] + ("embedding",)
elif pt_tuple_key[-1] == "weight" and pt_tuple_key not in random_flax_state_dict:
pt_tuple_key = pt_tuple_key[:-1] + ("kernel",)
pt_tensor = pt_tensor.T
elif pt_tuple_key[-1] == "gamma":
pt_tuple_key = pt_tuple_key[:-1] + ("weight",)
elif pt_tuple_key[-1] == "beta":
pt_tuple_key = pt_tuple_key[:-1] + ("bias",)
if pt_tuple_key in random_flax_state_dict:
if pt_tensor.shape != random_flax_state_dict[pt_tuple_key].shape:
                raise ValueError(
                    f"PyTorch checkpoint seems to be incorrect. Weight {pt_key} was "
                    f"expected to be of shape "
                    f"{random_flax_state_dict[pt_tuple_key].shape}, but is "
                    f"{pt_tensor.shape}."
                )
# also add unexpected weight so that warning is thrown
flax_state_dict[pt_tuple_key] = jnp.asarray(pt_tensor)
return unflatten_dict(flax_state_dict)
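# Hedged illustration (layer name and shapes invented): PyTorch's nn.Linear
# stores `weight` with shape (out_features, in_features) while Flax's Dense
# expects an (in_features, out_features) `kernel`, which is why the loop
# above renames "weight" -> "kernel" and transposes the tensor:
#
#   pt_state_dict {"dense.weight": (4, 3), "dense.bias": (4,)}
#   becomes flax params {"dense": {"kernel": (3, 4), "bias": (4,)}}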
#####################
# Flax => PyTorch #
#####################
def load_flax_checkpoint_in_pytorch_model(model, flax_checkpoint_path):
"""Load flax checkpoints in a PyTorch model"""
flax_checkpoint_path = os.path.abspath(flax_checkpoint_path)
logger.info(f"Loading Flax weights from {flax_checkpoint_path}")
# import correct flax class
flax_cls = getattr(transformers, "Flax" + model.__class__.__name__)
# load flax weight dict
with open(flax_checkpoint_path, "rb") as state_f:
try:
flax_state_dict = from_bytes(flax_cls, state_f.read())
except UnpicklingError:
raise EnvironmentError(f"Unable to convert {flax_checkpoint_path} to Flax deserializable object. ")
return load_flax_weights_in_pytorch_model(model, flax_state_dict)
def load_flax_weights_in_pytorch_model(pt_model, flax_state):
"""Load flax checkpoints in a PyTorch model"""
try:
import torch # noqa: F401
except ImportError:
logger.error(
"Loading a Flax weights in PyTorch, requires both PyTorch and Flax to be installed. Please see "
"https://pytorch.org/ and https://flax.readthedocs.io/en/latest/installation.html for installation instructions."
)
raise
flax_state_dict = flatten_dict(flax_state)
pt_model_dict = pt_model.state_dict()
remove_base_model_prefix = (pt_model.base_model_prefix in flax_state) and (
pt_model.base_model_prefix not in set([k.split(".")[0] for k in pt_model_dict.keys()])
)
add_base_model_prefix = (pt_model.base_model_prefix not in flax_state) and (
pt_model.base_model_prefix in set([k.split(".")[0] for k in pt_model_dict.keys()])
)
# keep track of unexpected & missing keys
unexpected_keys = []
missing_keys = set(pt_model_dict.keys())
for flax_key_tuple, flax_tensor in flax_state_dict.items():
has_base_model_prefix = flax_key_tuple[0] == pt_model.base_model_prefix
require_base_model_prefix = ".".join((pt_model.base_model_prefix,) + flax_key_tuple) in pt_model_dict
# adapt flax_key to prepare for loading from/to base model only
if remove_base_model_prefix and has_base_model_prefix:
flax_key_tuple = flax_key_tuple[1:]
elif add_base_model_prefix and require_base_model_prefix:
flax_key_tuple = (pt_model.base_model_prefix,) + flax_key_tuple
# rename flax weights to PyTorch format
if flax_key_tuple[-1] == "kernel" and ".".join(flax_key_tuple) not in pt_model_dict:
flax_key_tuple = flax_key_tuple[:-1] + ("weight",)
flax_tensor = flax_tensor.T
elif flax_key_tuple[-1] in ["scale", "embedding"]:
flax_key_tuple = flax_key_tuple[:-1] + ("weight",)
flax_key = ".".join(flax_key_tuple)
if flax_key in pt_model_dict:
if flax_tensor.shape != pt_model_dict[flax_key].shape:
raise ValueError(
f"Flax checkpoint seems to be incorrect. Weight {flax_key_tuple} was expected"
f"to be of shape {pt_model_dict[flax_key].shape}, but is {flax_tensor.shape}."
)
else:
# add weight to pytorch dict
flax_tensor = np.asarray(flax_tensor) if not isinstance(flax_tensor, np.ndarray) else flax_tensor
pt_model_dict[flax_key] = torch.from_numpy(flax_tensor)
# remove from missing keys
missing_keys.remove(flax_key)
else:
# weight is not expected by PyTorch model
unexpected_keys.append(flax_key)
pt_model.load_state_dict(pt_model_dict)
# re-transform missing_keys to list
missing_keys = list(missing_keys)
if len(unexpected_keys) > 0:
logger.warning(
"Some weights of the Flax model were not used when "
f"initializing the PyTorch model {pt_model.__class__.__name__}: {unexpected_keys}\n"
f"- This IS expected if you are initializing {pt_model.__class__.__name__} from a Flax model trained on another task "
"or with another architecture (e.g. initializing a BertForSequenceClassification model from a FlaxBertForPreTraining model).\n"
f"- This IS NOT expected if you are initializing {pt_model.__class__.__name__} from a Flax model that you expect "
"to be exactly identical (e.g. initializing a BertForSequenceClassification model from a FlaxBertForSequenceClassification model)."
)
else:
logger.warning(f"All Flax model weights were used when initializing {pt_model.__class__.__name__}.\n")
if len(missing_keys) > 0:
logger.warning(
f"Some weights of {pt_model.__class__.__name__} were not initialized from the Flax model "
f"and are newly initialized: {missing_keys}\n"
"You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference."
)
else:
logger.warning(
f"All the weights of {pt_model.__class__.__name__} were initialized from the Flax model.\n"
"If your task is similar to the task the model of the checkpoint was trained on, "
f"you can already use {pt_model.__class__.__name__} for predictions without further training."
)
return pt_model
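# Hedged usage sketch (model class is a typical choice, not prescribed; the
# file name matches the conventional Flax checkpoint name):
#
#   from transformers import BertConfig, BertForMaskedLM
#   pt_model = BertForMaskedLM(BertConfig())
#   pt_model = load_flax_checkpoint_in_pytorch_model(
#       pt_model, "flax_model.msgpack")
#
# Missing/unexpected keys only produce warnings, mirroring from_pretrained().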
| apache-2.0 | -4,569,772,984,410,672,000 | 6,703,950,528,403,315,000 | 42.685841 | 179 | 0.63861 | false |
Erethon/synnefo | snf-cyclades-app/synnefo/volume/util.py | 2 | 3867 | # Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from synnefo.db import models
from snf_django.lib.api import faults
from synnefo.api.util import get_image_dict, get_vm
from synnefo.plankton import backend
from synnefo.cyclades_settings import cyclades_services, BASE_HOST
from synnefo.lib import join_urls
from synnefo.lib.services import get_service_path
def get_volume(user_id, volume_id, for_update=False,
non_deleted=False,
exception=faults.ItemNotFound):
volumes = models.Volume.objects
if for_update:
volumes = volumes.select_for_update()
try:
volume_id = int(volume_id)
except (TypeError, ValueError):
raise faults.BadRequest("Invalid volume id: %s" % volume_id)
try:
volume = volumes.get(id=volume_id, userid=user_id)
if non_deleted and volume.deleted:
raise faults.BadRequest("Volume '%s' has been deleted."
% volume_id)
return volume
except models.Volume.DoesNotExist:
raise exception("Volume %s not found" % volume_id)
def get_volume_type(volume_type_id, for_update=False, include_deleted=False,
exception=faults.ItemNotFound):
vtypes = models.VolumeType.objects
if not include_deleted:
vtypes = vtypes.filter(deleted=False)
if for_update:
vtypes = vtypes.select_for_update()
try:
vtype_id = int(volume_type_id)
except (TypeError, ValueError):
raise faults.BadRequest("Invalid volume id: %s" % volume_type_id)
try:
return vtypes.get(id=vtype_id)
except models.VolumeType.DoesNotExist:
raise exception("Volume type %s not found" % vtype_id)
def get_snapshot(user_id, snapshot_id, exception=faults.ItemNotFound):
try:
with backend.PlanktonBackend(user_id) as b:
return b.get_snapshot(snapshot_id)
except faults.ItemNotFound:
raise exception("Snapshot %s not found" % snapshot_id)
def get_image(user_id, image_id, exception=faults.ItemNotFound):
try:
return get_image_dict(image_id, user_id)
except faults.ItemNotFound:
raise exception("Image %s not found" % image_id)
def get_server(user_id, server_id, for_update=False, non_deleted=False,
exception=faults.ItemNotFound):
try:
server_id = int(server_id)
except (TypeError, ValueError):
raise faults.BadRequest("Invalid server id: %s" % server_id)
try:
return get_vm(server_id, user_id, for_update=for_update,
non_deleted=non_deleted, non_suspended=True)
except faults.ItemNotFound:
raise exception("Server %s not found" % server_id)
VOLUME_URL = \
join_urls(BASE_HOST,
get_service_path(cyclades_services, "volume", version="v2.0"))
VOLUMES_URL = join_urls(VOLUME_URL, "volumes/")
SNAPSHOTS_URL = join_urls(VOLUME_URL, "snapshots/")
def volume_to_links(volume_id):
href = join_urls(VOLUMES_URL, str(volume_id))
return [{"rel": rel, "href": href} for rel in ("self", "bookmark")]
def snapshot_to_links(snapshot_id):
href = join_urls(SNAPSHOTS_URL, str(snapshot_id))
return [{"rel": rel, "href": href} for rel in ("self", "bookmark")]
| gpl-3.0 | 9,020,863,018,388,050,000 | -1,982,346,629,358,298,600 | 35.828571 | 76 | 0.673132 | false |
yglazko/socorro | socorro/unittest/testlib/testLoggerForTest.py | 11 | 5192 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from socorro.unittest.testlib.loggerForTest import TestingLogger
import logging
class BogusLogger:
def __init__(self):
self.item = None
def log(self,level,message,*args):
self.item = (level,message,args)
def testConstructor():
tl = TestingLogger()
bl = BogusLogger()
assert None == tl.logger
assert 6 == len(tl.levelcode)
expected = {0:'NOTSET',10:'DEBUG',20:'INFO',30:'WARNING',40:'ERROR',50:'FATAL'}
for c in expected.keys():
    assert expected[c] == tl.levelcode[c], 'But at %s expected %s got %s' % (c, expected[c], tl.levelcode[c])
tl = TestingLogger(bl)
assert bl is tl.logger
assert None == bl.item
def testLog():
bl = BogusLogger()
tl = TestingLogger()
tlb = TestingLogger(bl)
for level in range(0,60,10):
tl.log(level,'message')
tlb.log(level,'message')
assert 'message' == tl.buffer[-1]
assert level == tl.levels[-1]
assert (level,'message',()) == bl.item
tl = TestingLogger()
tlb = TestingLogger(bl)
for level in range(0,60,10):
tl.log(level,'message %s %s','one','two')
tlb.log(level,'message %s %s','one','two')
assert 'message one two' == tl.buffer[-1]
assert (level,'message %s %s',('one','two')) == bl.item
def testDebug():
bl = BogusLogger()
tl = TestingLogger()
tlb = TestingLogger(bl)
tl.debug("bug")
tlb.debug("bug")
assert (logging.DEBUG,'bug',()) == bl.item
assert logging.DEBUG == tl.levels[0]
assert logging.DEBUG == tlb.levels[0]
assert 'bug' == tl.buffer[0]
assert 'bug' == tlb.buffer[0]
def testInfo():
bl = BogusLogger()
tl = TestingLogger()
tlb = TestingLogger(bl)
tl.info("info")
tlb.info("info")
assert (logging.INFO,'info',()) == bl.item
assert logging.INFO == tl.levels[0]
assert logging.INFO == tlb.levels[0]
assert 'info' == tl.buffer[0]
assert 'info' == tlb.buffer[0]
def testWarning():
bl = BogusLogger()
tl = TestingLogger()
tlb = TestingLogger(bl)
tl.warning("warning")
tlb.warning("warning")
assert (logging.WARNING,'warning',()) == bl.item
assert logging.WARNING == tl.levels[0]
assert logging.WARNING == tlb.levels[0]
assert 'warning' == tl.buffer[0]
assert 'warning' == tlb.buffer[0]
def testWarn():
bl = BogusLogger()
tl = TestingLogger()
tlb = TestingLogger(bl)
tl.warn("warn")
tlb.warn("warn")
assert (logging.WARN,'warn',()) == bl.item
assert logging.WARN == tl.levels[0]
assert logging.WARN == tlb.levels[0]
assert 'warn' == tl.buffer[0]
assert 'warn' == tlb.buffer[0]
def testError():
bl = BogusLogger()
tl = TestingLogger()
tlb = TestingLogger(bl)
tl.error("error")
tlb.error("error")
assert (logging.ERROR,'error',()) == bl.item
assert logging.ERROR == tl.levels[0]
assert logging.ERROR == tlb.levels[0]
assert 'error' == tl.buffer[0]
assert 'error' == tlb.buffer[0]
def testCritical():
bl = BogusLogger()
tl = TestingLogger()
tlb = TestingLogger(bl)
tl.critical("critical")
tlb.critical("critical")
assert (logging.CRITICAL,'critical',()) == bl.item
assert logging.CRITICAL == tl.levels[0]
assert logging.CRITICAL == tlb.levels[0]
assert 'critical' == tl.buffer[0]
assert 'critical' == tlb.buffer[0]
def testFatal():
bl = BogusLogger()
tl = TestingLogger()
tlb = TestingLogger(bl)
tl.fatal("fatal")
tlb.fatal("fatal")
assert (logging.FATAL,'fatal',()) == bl.item
assert logging.FATAL == tl.levels[0]
assert logging.FATAL == tlb.levels[0]
assert 'fatal' == tl.buffer[0]
assert 'fatal' == tlb.buffer[0]
def testStrFunction():
tl = TestingLogger()
assert '' == str(tl)
tl.debug('debug')
expLines = ['DEBUG (10): debug']
tl.info('info')
expLines.append('INFO (20): info')
tl.warn('warn')
expLines.append('WARNING (30): warn')
tl.warning('warning')
expLines.append('WARNING (30): warning')
tl.error('error')
expLines.append('ERROR (40): error')
tl.critical('critical')
expLines.append('FATAL (50): critical')
tl.fatal('fatal')
expLines.append('FATAL (50): fatal')
expected = "\n".join(expLines)
assert expected == str(tl)
def testLenFunction():
tl = TestingLogger()
exp = 0
assert exp == len(tl)
tl.debug('woo')
exp += 1
assert exp == len(tl)
tl.info('woo')
exp += 1
assert exp == len(tl)
tl.warning('woo')
exp += 1
assert exp == len(tl)
tl.warn('woo')
exp += 1
assert exp == len(tl)
tl.error('woo')
exp += 1
assert exp == len(tl)
tl.critical('woo')
exp += 1
assert exp == len(tl)
tl.fatal('woo')
exp += 1
assert exp == len(tl)
def testClear():
tl = TestingLogger()
tl.clear()
assert 0 == len(tl)
assert 0 == len(tl.levels)
assert 0 == len(tl.buffer)
tl.debug('woo')
tl.info('woo')
tl.warning('woo')
tl.warn('woo')
tl.error('woo')
tl.critical('woo')
tl.fatal('woo')
assert 7 == len(tl)
assert 7 == len(tl.levels)
assert 7 == len(tl.buffer)
tl.clear()
assert 0 == len(tl)
assert 0 == len(tl.levels)
assert 0 == len(tl.buffer)
#def testFormatOne(): handled by testStrFunction()
| mpl-2.0 | 8,951,640,308,643,048,000 | 3,748,470,772,342,123,000 | 24.96 | 105 | 0.635978 | false |
jumpstarter-io/cinder | cinder/quota.py | 6 | 37954 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Quotas for volumes."""
import datetime
from oslo.config import cfg
from cinder import context
from cinder import db
from cinder import exception
from cinder.i18n import _
from cinder.openstack.common import importutils
from cinder.openstack.common import log as logging
from cinder.openstack.common import timeutils
LOG = logging.getLogger(__name__)
quota_opts = [
cfg.IntOpt('quota_volumes',
default=10,
help='Number of volumes allowed per project'),
cfg.IntOpt('quota_snapshots',
default=10,
help='Number of volume snapshots allowed per project'),
cfg.IntOpt('quota_consistencygroups',
default=10,
help='Number of consistencygroups allowed per project'),
cfg.IntOpt('quota_gigabytes',
default=1000,
help='Total amount of storage, in gigabytes, allowed '
'for volumes and snapshots per project'),
cfg.IntOpt('quota_backups',
default=10,
help='Number of volume backups allowed per project'),
cfg.IntOpt('quota_backup_gigabytes',
default=1000,
help='Total amount of storage, in gigabytes, allowed '
'for backups per project'),
cfg.IntOpt('reservation_expire',
default=86400,
help='Number of seconds until a reservation expires'),
cfg.IntOpt('until_refresh',
default=0,
help='Count of reservations until usage is refreshed'),
cfg.IntOpt('max_age',
default=0,
help='Number of seconds between subsequent usage refreshes'),
cfg.StrOpt('quota_driver',
default='cinder.quota.DbQuotaDriver',
help='Default driver to use for quota checks'),
cfg.BoolOpt('use_default_quota_class',
default=True,
help='Enables or disables use of default quota class '
'with default quota.'), ]
CONF = cfg.CONF
CONF.register_opts(quota_opts)
class DbQuotaDriver(object):
"""Driver to perform check to enforcement of quotas.
Also allows to obtain quota information.
The default driver utilizes the local database.
"""
def get_by_project(self, context, project_id, resource_name):
"""Get a specific quota by project."""
return db.quota_get(context, project_id, resource_name)
def get_by_class(self, context, quota_class, resource_name):
"""Get a specific quota by quota class."""
return db.quota_class_get(context, quota_class, resource_name)
def get_default(self, context, resource):
"""Get a specific default quota for a resource."""
default_quotas = db.quota_class_get_default(context)
return default_quotas.get(resource.name, resource.default)
def get_defaults(self, context, resources):
"""Given a list of resources, retrieve the default quotas.
Use the class quotas named `_DEFAULT_QUOTA_NAME` as default quotas,
        if they exist.
:param context: The request context, for access checks.
:param resources: A dictionary of the registered resources.
"""
quotas = {}
default_quotas = {}
if CONF.use_default_quota_class:
default_quotas = db.quota_class_get_default(context)
for resource in resources.values():
if resource.name not in default_quotas:
LOG.deprecated(_("Default quota for resource: %(res)s is set "
"by the default quota flag: quota_%(res)s, "
"it is now deprecated. Please use the "
"default quota class for default "
"quota.") % {'res': resource.name})
quotas[resource.name] = default_quotas.get(resource.name,
resource.default)
return quotas
def get_class_quotas(self, context, resources, quota_class,
defaults=True):
"""Given list of resources, retrieve the quotas for given quota class.
:param context: The request context, for access checks.
:param resources: A dictionary of the registered resources.
:param quota_class: The name of the quota class to return
quotas for.
:param defaults: If True, the default value will be reported
if there is no specific value for the
resource.
"""
quotas = {}
default_quotas = {}
class_quotas = db.quota_class_get_all_by_name(context, quota_class)
if defaults:
default_quotas = db.quota_class_get_default(context)
for resource in resources.values():
if resource.name in class_quotas:
quotas[resource.name] = class_quotas[resource.name]
continue
if defaults:
quotas[resource.name] = default_quotas.get(resource.name,
resource.default)
return quotas
def get_project_quotas(self, context, resources, project_id,
quota_class=None, defaults=True,
usages=True):
"""Given a list of resources, retrieve the quotas for the given
project.
:param context: The request context, for access checks.
:param resources: A dictionary of the registered resources.
:param project_id: The ID of the project to return quotas for.
:param quota_class: If project_id != context.project_id, the
quota class cannot be determined. This
parameter allows it to be specified. It
will be ignored if project_id ==
context.project_id.
:param defaults: If True, the quota class value (or the
default value, if there is no value from the
quota class) will be reported if there is no
specific value for the resource.
:param usages: If True, the current in_use and reserved counts
will also be returned.
"""
quotas = {}
project_quotas = db.quota_get_all_by_project(context, project_id)
if usages:
project_usages = db.quota_usage_get_all_by_project(context,
project_id)
# Get the quotas for the appropriate class. If the project ID
# matches the one in the context, we use the quota_class from
# the context, otherwise, we use the provided quota_class (if
# any)
if project_id == context.project_id:
quota_class = context.quota_class
if quota_class:
class_quotas = db.quota_class_get_all_by_name(context, quota_class)
else:
class_quotas = {}
default_quotas = self.get_defaults(context, resources)
for resource in resources.values():
# Omit default/quota class values
if not defaults and resource.name not in project_quotas:
continue
quotas[resource.name] = dict(
limit=project_quotas.get(
resource.name,
class_quotas.get(resource.name,
default_quotas[resource.name])),
)
# Include usages if desired. This is optional because one
# internal consumer of this interface wants to access the
# usages directly from inside a transaction.
if usages:
usage = project_usages.get(resource.name, {})
quotas[resource.name].update(
in_use=usage.get('in_use', 0),
reserved=usage.get('reserved', 0), )
return quotas
def _get_quotas(self, context, resources, keys, has_sync, project_id=None):
"""A helper method which retrieves the quotas for specific resources.
        The specific resources are identified by keys and apply to the
        current context.
:param context: The request context, for access checks.
:param resources: A dictionary of the registered resources.
:param keys: A list of the desired quotas to retrieve.
:param has_sync: If True, indicates that the resource must
have a sync attribute; if False, indicates
that the resource must NOT have a sync
attribute.
:param project_id: Specify the project_id if current context
is admin and admin wants to impact on
common user's tenant.
"""
# Filter resources
if has_sync:
sync_filt = lambda x: hasattr(x, 'sync')
else:
sync_filt = lambda x: not hasattr(x, 'sync')
desired = set(keys)
sub_resources = dict((k, v) for k, v in resources.items()
if k in desired and sync_filt(v))
# Make sure we accounted for all of them...
if len(keys) != len(sub_resources):
unknown = desired - set(sub_resources.keys())
raise exception.QuotaResourceUnknown(unknown=sorted(unknown))
# Grab and return the quotas (without usages)
quotas = self.get_project_quotas(context, sub_resources,
project_id,
context.quota_class, usages=False)
return dict((k, v['limit']) for k, v in quotas.items())
def limit_check(self, context, resources, values, project_id=None):
"""Check simple quota limits.
For limits--those quotas for which there is no usage
synchronization function--this method checks that a set of
proposed values are permitted by the limit restriction.
This method will raise a QuotaResourceUnknown exception if a
given resource is unknown or if it is not a simple limit
resource.
If any of the proposed values is over the defined quota, an
OverQuota exception will be raised with the sorted list of the
resources which are too high. Otherwise, the method returns
nothing.
:param context: The request context, for access checks.
:param resources: A dictionary of the registered resources.
:param values: A dictionary of the values to check against the
quota.
:param project_id: Specify the project_id if current context
is admin and admin wants to impact on
common user's tenant.
"""
# Ensure no value is less than zero
unders = [key for key, val in values.items() if val < 0]
if unders:
raise exception.InvalidQuotaValue(unders=sorted(unders))
# If project_id is None, then we use the project_id in context
if project_id is None:
project_id = context.project_id
# Get the applicable quotas
quotas = self._get_quotas(context, resources, values.keys(),
has_sync=False, project_id=project_id)
# Check the quotas and construct a list of the resources that
# would be put over limit by the desired values
overs = [key for key, val in values.items()
if quotas[key] >= 0 and quotas[key] < val]
if overs:
raise exception.OverQuota(overs=sorted(overs), quotas=quotas,
usages={})
def reserve(self, context, resources, deltas, expire=None,
project_id=None):
"""Check quotas and reserve resources.
For counting quotas--those quotas for which there is a usage
synchronization function--this method checks quotas against
current usage and the desired deltas.
This method will raise a QuotaResourceUnknown exception if a
given resource is unknown or if it does not have a usage
synchronization function.
If any of the proposed values is over the defined quota, an
OverQuota exception will be raised with the sorted list of the
resources which are too high. Otherwise, the method returns a
list of reservation UUIDs which were created.
:param context: The request context, for access checks.
:param resources: A dictionary of the registered resources.
:param deltas: A dictionary of the proposed delta changes.
:param expire: An optional parameter specifying an expiration
time for the reservations. If it is a simple
number, it is interpreted as a number of
seconds and added to the current time; if it is
a datetime.timedelta object, it will also be
added to the current time. A datetime.datetime
object will be interpreted as the absolute
expiration time. If None is specified, the
default expiration time set by
--default-reservation-expire will be used (this
value will be treated as a number of seconds).
:param project_id: Specify the project_id if current context
is admin and admin wants to impact on
common user's tenant.
"""
# Set up the reservation expiration
if expire is None:
expire = CONF.reservation_expire
if isinstance(expire, (int, long)):
expire = datetime.timedelta(seconds=expire)
if isinstance(expire, datetime.timedelta):
expire = timeutils.utcnow() + expire
if not isinstance(expire, datetime.datetime):
raise exception.InvalidReservationExpiration(expire=expire)
# If project_id is None, then we use the project_id in context
if project_id is None:
project_id = context.project_id
# Get the applicable quotas.
# NOTE(Vek): We're not worried about races at this point.
# Yes, the admin may be in the process of reducing
# quotas, but that's a pretty rare thing.
quotas = self._get_quotas(context, resources, deltas.keys(),
has_sync=True, project_id=project_id)
# NOTE(Vek): Most of the work here has to be done in the DB
# API, because we have to do it in a transaction,
# which means access to the session. Since the
# session isn't available outside the DBAPI, we
# have to do the work there.
return db.quota_reserve(context, resources, quotas, deltas, expire,
CONF.until_refresh, CONF.max_age,
project_id=project_id)
def commit(self, context, reservations, project_id=None):
"""Commit reservations.
:param context: The request context, for access checks.
:param reservations: A list of the reservation UUIDs, as
returned by the reserve() method.
:param project_id: Specify the project_id if current context
is admin and admin wants to impact on
common user's tenant.
"""
# If project_id is None, then we use the project_id in context
if project_id is None:
project_id = context.project_id
db.reservation_commit(context, reservations, project_id=project_id)
def rollback(self, context, reservations, project_id=None):
"""Roll back reservations.
:param context: The request context, for access checks.
:param reservations: A list of the reservation UUIDs, as
returned by the reserve() method.
:param project_id: Specify the project_id if current context
is admin and admin wants to impact on
common user's tenant.
"""
# If project_id is None, then we use the project_id in context
if project_id is None:
project_id = context.project_id
db.reservation_rollback(context, reservations, project_id=project_id)
def destroy_all_by_project(self, context, project_id):
"""Destroy all that is associated with a project.
This includes quotas, usages and reservations.
:param context: The request context, for access checks.
:param project_id: The ID of the project being deleted.
"""
db.quota_destroy_all_by_project(context, project_id)
def expire(self, context):
"""Expire reservations.
Explores all currently existing reservations and rolls back
any that have expired.
:param context: The request context, for access checks.
"""
db.reservation_expire(context)
class BaseResource(object):
"""Describe a single resource for quota checking."""
def __init__(self, name, flag=None):
"""Initializes a Resource.
:param name: The name of the resource, i.e., "volumes".
:param flag: The name of the flag or configuration option
which specifies the default value of the quota
for this resource.
"""
self.name = name
self.flag = flag
def quota(self, driver, context, **kwargs):
"""Given a driver and context, obtain the quota for this resource.
:param driver: A quota driver.
:param context: The request context.
:param project_id: The project to obtain the quota value for.
If not provided, it is taken from the
context. If it is given as None, no
project-specific quota will be searched
for.
:param quota_class: The quota class corresponding to the
project, or for which the quota is to be
looked up. If not provided, it is taken
from the context. If it is given as None,
no quota class-specific quota will be
searched for. Note that the quota class
defaults to the value in the context,
which may not correspond to the project if
project_id is not the same as the one in
the context.
"""
# Get the project ID
project_id = kwargs.get('project_id', context.project_id)
# Ditto for the quota class
quota_class = kwargs.get('quota_class', context.quota_class)
# Look up the quota for the project
if project_id:
try:
return driver.get_by_project(context, project_id, self.name)
except exception.ProjectQuotaNotFound:
pass
# Try for the quota class
if quota_class:
try:
return driver.get_by_class(context, quota_class, self.name)
except exception.QuotaClassNotFound:
pass
# OK, return the default
return driver.get_default(context, self)
@property
def default(self):
"""Return the default value of the quota."""
return CONF[self.flag] if self.flag else -1
class ReservableResource(BaseResource):
"""Describe a reservable resource."""
def __init__(self, name, sync, flag=None):
"""Initializes a ReservableResource.
Reservable resources are those resources which directly
correspond to objects in the database, i.e., volumes, gigabytes,
etc. A ReservableResource must be constructed with a usage
synchronization function, which will be called to determine the
current counts of one or more resources.
The usage synchronization function will be passed three
arguments: an admin context, the project ID, and an opaque
session object, which should in turn be passed to the
underlying database function. Synchronization functions
should return a dictionary mapping resource names to the
current in_use count for those resources; more than one
resource and resource count may be returned. Note that
synchronization functions may be associated with more than one
ReservableResource.
:param name: The name of the resource, i.e., "volumes".
        :param sync: A dbapi method name which returns a dictionary
to resynchronize the in_use count for one or more
resources, as described above.
:param flag: The name of the flag or configuration option
which specifies the default value of the quota
for this resource.
"""
super(ReservableResource, self).__init__(name, flag=flag)
self.sync = sync
class AbsoluteResource(BaseResource):
"""Describe a non-reservable resource."""
pass
class CountableResource(AbsoluteResource):
"""Describe a resource where counts aren't based only on the project ID."""
def __init__(self, name, count, flag=None):
"""Initializes a CountableResource.
Countable resources are those resources which directly
correspond to objects in the database, i.e., volumes, gigabytes,
etc., but for which a count by project ID is inappropriate. A
CountableResource must be constructed with a counting
function, which will be called to determine the current counts
of the resource.
The counting function will be passed the context, along with
the extra positional and keyword arguments that are passed to
Quota.count(). It should return an integer specifying the
count.
Note that this counting is not performed in a transaction-safe
manner. This resource class is a temporary measure to provide
required functionality, until a better approach to solving
this problem can be evolved.
:param name: The name of the resource, i.e., "volumes".
:param count: A callable which returns the count of the
resource. The arguments passed are as described
above.
:param flag: The name of the flag or configuration option
which specifies the default value of the quota
for this resource.
"""
super(CountableResource, self).__init__(name, flag=flag)
self.count = count
class VolumeTypeResource(ReservableResource):
"""ReservableResource for a specific volume type."""
def __init__(self, part_name, volume_type):
"""Initializes a VolumeTypeResource.
:param part_name: The kind of resource, i.e., "volumes".
:param volume_type: The volume type for this resource.
"""
self.volume_type_name = volume_type['name']
self.volume_type_id = volume_type['id']
name = "%s_%s" % (part_name, self.volume_type_name)
super(VolumeTypeResource, self).__init__(name, "_sync_%s" % part_name)
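# Hedged sketch (type name and id invented): per-type resources are named by
# suffixing the base resource with the volume type name, and they reuse the
# base sync function:
#
#   vt = VolumeTypeResource('gigabytes', {'name': 'gold', 'id': 'some-uuid'})
#   vt.name  # -> 'gigabytes_gold'
#   vt.sync  # -> '_sync_gigabytes'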
class QuotaEngine(object):
"""Represent the set of recognized quotas."""
def __init__(self, quota_driver_class=None):
"""Initialize a Quota object."""
if not quota_driver_class:
quota_driver_class = CONF.quota_driver
if isinstance(quota_driver_class, basestring):
quota_driver_class = importutils.import_object(quota_driver_class)
self._resources = {}
self._driver = quota_driver_class
def __contains__(self, resource):
return resource in self.resources
def register_resource(self, resource):
"""Register a resource."""
self._resources[resource.name] = resource
def register_resources(self, resources):
"""Register a list of resources."""
for resource in resources:
self.register_resource(resource)
def get_by_project(self, context, project_id, resource_name):
"""Get a specific quota by project."""
return self._driver.get_by_project(context, project_id, resource_name)
def get_by_class(self, context, quota_class, resource_name):
"""Get a specific quota by quota class."""
return self._driver.get_by_class(context, quota_class, resource_name)
def get_default(self, context, resource):
"""Get a specific default quota for a resource."""
return self._driver.get_default(context, resource)
def get_defaults(self, context):
"""Retrieve the default quotas.
:param context: The request context, for access checks.
"""
return self._driver.get_defaults(context, self.resources)
def get_class_quotas(self, context, quota_class, defaults=True):
"""Retrieve the quotas for the given quota class.
:param context: The request context, for access checks.
:param quota_class: The name of the quota class to return
quotas for.
:param defaults: If True, the default value will be reported
if there is no specific value for the
resource.
"""
return self._driver.get_class_quotas(context, self.resources,
quota_class, defaults=defaults)
def get_project_quotas(self, context, project_id, quota_class=None,
defaults=True, usages=True):
"""Retrieve the quotas for the given project.
:param context: The request context, for access checks.
:param project_id: The ID of the project to return quotas for.
:param quota_class: If project_id != context.project_id, the
quota class cannot be determined. This
parameter allows it to be specified.
:param defaults: If True, the quota class value (or the
default value, if there is no value from the
quota class) will be reported if there is no
specific value for the resource.
:param usages: If True, the current in_use and reserved counts
will also be returned.
"""
return self._driver.get_project_quotas(context, self.resources,
project_id,
quota_class=quota_class,
defaults=defaults,
usages=usages)
def count(self, context, resource, *args, **kwargs):
"""Count a resource.
For countable resources, invokes the count() function and
returns its result. Arguments following the context and
resource are passed directly to the count function declared by
the resource.
:param context: The request context, for access checks.
:param resource: The name of the resource, as a string.
"""
# Get the resource
res = self.resources.get(resource)
if not res or not hasattr(res, 'count'):
raise exception.QuotaResourceUnknown(unknown=[resource])
return res.count(context, *args, **kwargs)
def limit_check(self, context, project_id=None, **values):
"""Check simple quota limits.
For limits--those quotas for which there is no usage
synchronization function--this method checks that a set of
proposed values are permitted by the limit restriction. The
values to check are given as keyword arguments, where the key
identifies the specific quota limit to check, and the value is
the proposed value.
This method will raise a QuotaResourceUnknown exception if a
given resource is unknown or if it is not a simple limit
resource.
If any of the proposed values is over the defined quota, an
OverQuota exception will be raised with the sorted list of the
resources which are too high. Otherwise, the method returns
nothing.
:param context: The request context, for access checks.
:param project_id: Specify the project_id if current context
is admin and admin wants to impact on
common user's tenant.
"""
return self._driver.limit_check(context, self.resources, values,
project_id=project_id)
def reserve(self, context, expire=None, project_id=None, **deltas):
"""Check quotas and reserve resources.
For counting quotas--those quotas for which there is a usage
synchronization function--this method checks quotas against
current usage and the desired deltas. The deltas are given as
keyword arguments, and current usage and other reservations
are factored into the quota check.
This method will raise a QuotaResourceUnknown exception if a
given resource is unknown or if it does not have a usage
synchronization function.
If any of the proposed values is over the defined quota, an
OverQuota exception will be raised with the sorted list of the
resources which are too high. Otherwise, the method returns a
list of reservation UUIDs which were created.
:param context: The request context, for access checks.
:param expire: An optional parameter specifying an expiration
time for the reservations. If it is a simple
number, it is interpreted as a number of
seconds and added to the current time; if it is
a datetime.timedelta object, it will also be
added to the current time. A datetime.datetime
object will be interpreted as the absolute
expiration time. If None is specified, the
default expiration time set by
--default-reservation-expire will be used (this
value will be treated as a number of seconds).
:param project_id: Specify the project_id if current context
is admin and admin wants to impact on
common user's tenant.
"""
reservations = self._driver.reserve(context, self.resources, deltas,
expire=expire,
project_id=project_id)
LOG.debug("Created reservations %s" % reservations)
return reservations
def commit(self, context, reservations, project_id=None):
"""Commit reservations.
:param context: The request context, for access checks.
:param reservations: A list of the reservation UUIDs, as
returned by the reserve() method.
:param project_id: Specify the project_id if current context
is admin and admin wants to impact on
common user's tenant.
"""
try:
self._driver.commit(context, reservations, project_id=project_id)
except Exception:
# NOTE(Vek): Ignoring exceptions here is safe, because the
# usage resynchronization and the reservation expiration
# mechanisms will resolve the issue. The exception is
# logged, however, because this is less than optimal.
LOG.exception(_("Failed to commit reservations %s") % reservations)
def rollback(self, context, reservations, project_id=None):
"""Roll back reservations.
:param context: The request context, for access checks.
:param reservations: A list of the reservation UUIDs, as
returned by the reserve() method.
:param project_id: Specify the project_id if current context
is admin and admin wants to impact on
common user's tenant.
"""
try:
self._driver.rollback(context, reservations, project_id=project_id)
except Exception:
# NOTE(Vek): Ignoring exceptions here is safe, because the
# usage resynchronization and the reservation expiration
# mechanisms will resolve the issue. The exception is
# logged, however, because this is less than optimal.
LOG.exception(_("Failed to roll back reservations "
"%s") % reservations)
def destroy_all_by_project(self, context, project_id):
"""Destroy all quotas, usages, and reservations associated with a
project.
:param context: The request context, for access checks.
:param project_id: The ID of the project being deleted.
"""
self._driver.destroy_all_by_project(context, project_id)
def expire(self, context):
"""Expire reservations.
Explores all currently existing reservations and rolls back
any that have expired.
:param context: The request context, for access checks.
"""
self._driver.expire(context)
def add_volume_type_opts(self, context, opts, volume_type_id):
"""Add volume type resource options.
Adds elements to the opts hash for volume type quotas.
If a resource is being reserved ('gigabytes', etc) and the volume
type is set up for its own quotas, these reservations are copied
into keys for 'gigabytes_<volume type name>', etc.
:param context: The request context, for access checks.
:param opts: The reservations options hash.
:param volume_type_id: The volume type id for this reservation.
"""
if not volume_type_id:
return
# NOTE(jdg): set inactive to True in volume_type_get, as we
# may be operating on a volume that was created with a type
# that has since been deleted.
volume_type = db.volume_type_get(context, volume_type_id, True)
for quota in ('volumes', 'gigabytes', 'snapshots'):
if quota in opts:
vtype_quota = "%s_%s" % (quota, volume_type['name'])
opts[vtype_quota] = opts[quota]
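    # Illustrative note (not in the original module): given
    # opts = {'gigabytes': 10} and a volume type named 'gold', the loop
    # above adds {'gigabytes_gold': 10}, so the per-type quota is reserved
    # alongside the global one.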
@property
def resource_names(self):
return sorted(self.resources.keys())
@property
def resources(self):
return self._resources
class VolumeTypeQuotaEngine(QuotaEngine):
"""Represent the set of all quotas."""
@property
def resources(self):
"""Fetches all possible quota resources."""
result = {}
# Global quotas.
argses = [('volumes', '_sync_volumes', 'quota_volumes'),
('snapshots', '_sync_snapshots', 'quota_snapshots'),
('gigabytes', '_sync_gigabytes', 'quota_gigabytes'),
('backups', '_sync_backups', 'quota_backups'),
('backup_gigabytes', '_sync_backup_gigabytes',
'quota_backup_gigabytes')]
for args in argses:
resource = ReservableResource(*args)
result[resource.name] = resource
# Volume type quotas.
volume_types = db.volume_type_get_all(context.get_admin_context(),
False)
for volume_type in volume_types.values():
for part_name in ('volumes', 'gigabytes', 'snapshots'):
resource = VolumeTypeResource(part_name, volume_type)
result[resource.name] = resource
return result
def register_resource(self, resource):
raise NotImplementedError(_("Cannot register resource"))
def register_resources(self, resources):
raise NotImplementedError(_("Cannot register resources"))
class CGQuotaEngine(QuotaEngine):
"""Represent the consistencygroup quotas."""
@property
def resources(self):
"""Fetches all possible quota resources."""
result = {}
# Global quotas.
argses = [('consistencygroups', '_sync_consistencygroups',
'quota_consistencygroups'), ]
for args in argses:
resource = ReservableResource(*args)
result[resource.name] = resource
return result
def register_resource(self, resource):
raise NotImplementedError(_("Cannot register resource"))
def register_resources(self, resources):
raise NotImplementedError(_("Cannot register resources"))
QUOTAS = VolumeTypeQuotaEngine()
CGQUOTAS = CGQuotaEngine()
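# --- Illustrative usage sketch (not part of the original module) ---
# A hypothetical pass through the reserve/commit/rollback cycle; `ctxt`
# stands in for a request context and the deltas are made-up values.
#
#   reservations = QUOTAS.reserve(ctxt, volumes=1, gigabytes=10)
#   try:
#       ...  # create the volume, do the work
#       QUOTAS.commit(ctxt, reservations)
#   except Exception:
#       QUOTAS.rollback(ctxt, reservations)
#       raise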
| apache-2.0 | -4,607,201,789,738,581,500 | -3,206,126,553,655,647,700 | 40.254348 | 79 | 0.597276 | false |
timm/sandbox | py/cocomo/poly.py | 1 | 3693 | import random
def tunings(_ = None):
Within(txt="loc", lo=2, hi=2000)
prep([
# vlow low nom high vhigh xhigh
# scale factors:
'Flex', 5.07, 4.05, 3.04, 2.03, 1.01, _],[
'Pmat', 7.80, 6.24, 4.68, 3.12, 1.56, _],[
'Prec', 6.20, 4.96, 3.72, 2.48, 1.24, _],[
'Resl', 7.07, 5.65, 4.24, 2.83, 1.41, _],[
    'Team', 5.48, 4.38, 3.29, 2.19, 1.10, _],[
# effort multipliers:
'acap', 1.42, 1.19, 1.00, 0.85, 0.71, _],[
'aexp', 1.22, 1.10, 1.00, 0.88, 0.81, _],[
'cplx', 0.73, 0.87, 1.00, 1.17, 1.34, 1.74],[
'data', _, 0.90, 1.00, 1.14, 1.28, _],[
'docu', 0.81, 0.91, 1.00, 1.11, 1.23, _],[
'ltex', 1.20, 1.09, 1.00, 0.91, 0.84, _],[
'pcap', 1.34, 1.15, 1.00, 0.88, 0.76, _],[
'pcon', 1.29, 1.12, 1.00, 0.90, 0.81, _],[
'plex', 1.19, 1.09, 1.00, 0.91, 0.85, _],[
'pvol', _, 0.87, 1.00, 1.15, 1.30, _],[
'rely', 0.82, 0.92, 1.00, 1.10, 1.26, _],[
'ruse', _, 0.95, 1.00, 1.07, 1.15, 1.24],[
'sced', 1.43, 1.14, 1.00, 1.00, 1.00, _],[
'site', 1.22, 1.09, 1.00, 0.93, 0.86, 0.80],[
'stor', _, _, 1.00, 1.05, 1.17, 1.46],[
'time', _, _, 1.00, 1.11, 1.29, 1.63],[
'tool', 1.17, 1.09, 1.00, 0.90, 0.78, _])
def COCOMO2(project, t, a=2.94, b=0.91, e=2.7182818285):
em = t.acap() * t.aexp() * t.cplx() * t.data() * t.docu() * \
t.ltex() * t.pcap() * t.pcon() * t.plex() * t.pvol() * \
t.rely() * t.ruse() * t.sced() * t.site() * t.stor() * \
t.time() * t.tool()
sf = t.flex() + t.pmat() + t.prec() + t.resl() + t.team()
  return a * em * t.loc() ** (b + 0.01 * sf)
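# Illustrative sketch (not in the original file): drawing one effort
# estimate once tunings() has populated the Var.all registry. COCOMO2
# then returns person-months for one randomly sampled project.
#
#   tunings()
#   print(COCOMO2(None, Var.all))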
#####################################################
class o:
def __init__(i, **d): i.__dict__.update(d)
def X(y): return y() if callable(y) else y
def prep(*rows):
[ OneOf(row[0],row[1:]) for row in rows]
class Var:
all = o()
def __init__(i, txt) :
    Var.all.__dict__[txt.lower()] = i  # lowercase key so accessors like t.flex() resolve
i.txt = txt
i.local = None
i.reset()
def reset(i): i.cache = Cache(i.any)
def any(i): return random.choice(i.local) if i.local else i.any1()
def __call__(i) : return i.cache()
def __neg__(i) : return -1*X(i)
def __pos__(i) : return +1*X(i)
def __abs__(i) : return abs(X(i))
def __lt__(i,j) : return X(i) < X(j)
def __gt__(i,j) : return X(i) > X(j)
def __le__(i,j) : return X(i) <= X(j)
def __ge__(i,j) : return X(i) >= X(j)
def __ne__(i,j) : return X(i) != X(j)
def __eq__(i,j) : return X(i) == X(j)
def __add__(i,j) : return X(i) + X(j)
def __sub__(i,j) : return X(i) - X(j)
def __mul__(i,j) : return X(i) * X(j)
def __mod__(i,j) : return X(i) % X(j)
def __pow__(i,j) : return X(i) ** X(j)
def __truediv__(i,j) : return X(i) / X(j)
def __floordiv__(i,j): return X(i) // X(j)
class Within(Var):
def __init__(i, txt, lo=0, hi=1):
super().__init__(txt)
i.lo, i.hi = lo,hi
def xplain(i,x): return x
def any1(i) : return random.uniform(i.lo, i.hi)
def __repr__(i): return '<%s %s to %s>' % (i.txt, i.lo, i.hi)
class OneOf(Var):
def __init__(i,txt,lst):
super().__init__(txt)
i.d = {n:x for n,x in enumerate(lst) if x}
i.show = {i.d[x]:x for x in i.d}
def xplain(i,x): return i.show[x]
def any1(i) : return i.d[ random.choice(list(i.d.keys())) ]
def __repr__(i): return '<%s %s>' % (i.txt, list(i.d.keys()))
class Cache():
def __init__(i, fun):
i.kept, i.fun = None,fun
def __call__(i):
i.kept = i.kept if i.kept is not None else X(i.fun)
return i.kept
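# Illustrative note (not in the original): Cache memoizes the first draw,
# so a Var returns one stable sample until reset() is called.
#
#   v = Within("x", 0, 1)
#   v() == v()   # True -- the same cached draw
#   v.reset()    # the next call re-samples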
tunings()
random.seed(1)
for _ in range(0,20):
#Var.all.acap.reset()
print([Var.all.acap.xplain(Var.all.acap()) for _ in range(0,10)])
| bsd-3-clause | 3,807,558,914,699,620,400 | 6,377,476,052,503,896,000 | 34.171429 | 68 | 0.461143 | false |
ForensicTools/GRREAT-475_2141-Chaigon-Failey-Siebert | gui/plugins/hunt_view.py | 2 | 45904 | #!/usr/bin/env python
# -*- mode: python; encoding: utf-8 -*-
#
"""This is the interface for managing hunts."""
import collections as py_collections
import operator
import StringIO
import urllib
import logging
from grr.gui import plot_lib
from grr.gui import renderers
from grr.gui.plugins import crash_view
from grr.gui.plugins import fileview
from grr.gui.plugins import foreman
from grr.gui.plugins import forms
from grr.gui.plugins import searchclient
from grr.gui.plugins import semantic
from grr.lib import access_control
from grr.lib import aff4
from grr.lib import data_store
from grr.lib import flow
from grr.lib import hunts
from grr.lib import rdfvalue
from grr.lib import utils
class ManageHunts(renderers.Splitter2Way):
"""Manages Hunts GUI Screen."""
description = "Hunt Manager"
behaviours = frozenset(["General"])
top_renderer = "HuntTable"
bottom_renderer = "HuntViewTabs"
context_help_url = "user_manual.html#_creating_a_hunt"
layout_template = (renderers.Splitter2Way.layout_template +
renderers.TemplateRenderer.help_template)
def Layout(self, request, response):
response = super(ManageHunts, self).Layout(request, response)
return self.CallJavascript(response, "ManageHunts.Layout")
class HuntStateIcon(semantic.RDFValueRenderer):
"""Render the hunt state by using an icon.
This class is similar to FlowStateIcon, but it also adds STATE_STOPPED
state for hunts that were created but not yet started (because of lack of
approval, for example).
"""
layout_template = renderers.Template("""
<div class="centered hunt-state-icon" state="{{this.state_str|escape}}">
<img class='grr-icon grr-flow-icon'
src='/static/images/{{this.icon|escape}}' />
</div>
""")
# Maps the flow states to icons we can show
state_map = {"STOPPED": "stock_yes.png",
"STARTED": "clock.png",
"PAUSED": "pause.png"}
def Layout(self, request, response):
self.state_str = str(self.proxy)
self.icon = self.state_map.get(self.proxy, "question-red.png")
return super(HuntStateIcon, self).Layout(request, response)
class RunHuntConfirmationDialog(renderers.ConfirmationDialogRenderer):
"""Dialog that asks confirmation to run a hunt and actually runs it."""
post_parameters = ["hunt_id"]
header = "Run a hunt?"
content_template = renderers.Template("""
<p>Are you sure you want to <strong>run</strong> this hunt?</p>
""")
ajax_template = renderers.Template("""
<p class="text-info">Hunt started successfully!</p>
""")
def Layout(self, request, response):
self.check_access_subject = rdfvalue.RDFURN(request.REQ.get("hunt_id"))
return super(RunHuntConfirmationDialog, self).Layout(request, response)
def RenderAjax(self, request, response):
flow.GRRFlow.StartFlow(flow_name="StartHuntFlow", token=request.token,
hunt_urn=rdfvalue.RDFURN(request.REQ.get("hunt_id")))
return self.RenderFromTemplate(self.ajax_template, response,
unique=self.unique)
class PauseHuntConfirmationDialog(renderers.ConfirmationDialogRenderer):
"""Dialog that asks confirmation to pause a hunt and actually runs it."""
post_parameters = ["hunt_id"]
header = "Pause a hunt?"
content_template = renderers.Template("""
<p>Are you sure you want to <strong>pause</strong> this hunt?</p>
""")
ajax_template = renderers.Template("""
<p class="text-info">Hunt paused successfully!</p>
""")
def Layout(self, request, response):
self.check_access_subject = rdfvalue.RDFURN(request.REQ.get("hunt_id"))
return super(PauseHuntConfirmationDialog, self).Layout(request, response)
def RenderAjax(self, request, response):
flow.GRRFlow.StartFlow(flow_name="PauseHuntFlow", token=request.token,
hunt_urn=rdfvalue.RDFURN(request.REQ.get("hunt_id")))
return self.RenderFromTemplate(self.ajax_template, response,
unique=self.unique)
class ModifyHuntDialog(renderers.ConfirmationDialogRenderer):
"""Dialog that allows user to modify certain hunt parameters."""
post_parameters = ["hunt_id"]
header = "Modify a hunt"
proceed_button_title = "Modify!"
expiry_time_dividers = ((60*60*24, "d"), (60*60, "h"), (60, "m"), (1, "s"))
content_template = renderers.Template("""
{{this.hunt_params_form|safe}}
""")
ajax_template = renderers.Template("""
<p class="text-info">Hunt modified successfully!</p>
""")
def Layout(self, request, response):
"""Layout handler."""
hunt_urn = rdfvalue.RDFURN(request.REQ.get("hunt_id"))
with aff4.FACTORY.Open(hunt_urn, aff4_type="GRRHunt",
token=request.token) as hunt:
runner = hunt.GetRunner()
hunt_args = rdfvalue.ModifyHuntFlowArgs(
client_limit=runner.args.client_limit,
expiry_time=runner.context.expires,
)
self.hunt_params_form = forms.SemanticProtoFormRenderer(
hunt_args, supressions=["hunt_urn"]).RawHTML(request)
self.check_access_subject = hunt_urn
return super(ModifyHuntDialog, self).Layout(request, response)
def RenderAjax(self, request, response):
"""Starts ModifyHuntFlow that actually modifies a hunt."""
hunt_urn = rdfvalue.RDFURN(request.REQ.get("hunt_id"))
args = forms.SemanticProtoFormRenderer(
rdfvalue.ModifyHuntFlowArgs()).ParseArgs(request)
flow.GRRFlow.StartFlow(flow_name="ModifyHuntFlow", token=request.token,
hunt_urn=hunt_urn, args=args)
return self.RenderFromTemplate(self.ajax_template, response,
unique=self.unique)
class HuntTable(fileview.AbstractFileTable):
"""Show all hunts."""
selection_publish_queue = "hunt_select"
custom_class = "HuntTable"
layout_template = """
<div id="new_hunt_dialog_{{unique|escape}}"
class="modal wide-modal high-modal" update_on_show="true"
tabindex="-1" role="dialog" aria-hidden="true">
</div>
<div id="run_hunt_dialog_{{unique|escape}}"
class="modal" tabindex="-1" role="dialog" aria-hidden="true">
</div>
<div id="pause_hunt_dialog_{{unique|escape}}"
class="modal" tabindex="-1" role="dialog" aria-hidden="true">
</div>
<div id="modify_hunt_dialog_{{unique|escape}}"
class="modal" tabindex="-1" role="dialog" aria-hidden="true">
</div>
<ul class="breadcrumb">
<li>
<button id='new_hunt_{{unique|escape}}' title='New Hunt'
class="btn btn-default" name="NewHunt" data-toggle="modal"
data-target="#new_hunt_dialog_{{unique|escape}}">
<img src='/static/images/new.png' class='toolbar_icon'>
</button>
<div class="btn-group">
<button id='run_hunt_{{unique|escape}}' title='Run Hunt'
class="btn btn-default" disabled="yes" name="RunHunt" data-toggle="modal"
data-target="#run_hunt_dialog_{{unique|escape}}">
<img src='/static/images/play_button.png' class='toolbar_icon'>
</button>
<button id='pause_hunt_{{unique|escape}}' title='Pause Hunt'
class="btn btn-default" disabled="yes" name="PauseHunt" data-toggle="modal"
data-target="#pause_hunt_dialog_{{unique|escape}}">
<img src='/static/images/pause_button.png' class='toolbar_icon'>
</button>
<button id='modify_hunt_{{unique|escape}}' title='Modify Hunt'
class="btn btn-default" disabled="yes" name="ModifyHunt" data-toggle="modal"
data-target="#modify_hunt_dialog_{{unique|escape}}">
<img src='/static/images/modify.png' class='toolbar_icon'>
</button>
<button id='toggle_robot_hunt_display_{{unique|escape}}'
title='Show/Hide Automated hunts'
class="btn btn-default" name="ToggleRobotHuntDisplay">
<img src='/static/images/robot.png' class='toolbar_icon'>
</button>
</div>
<div class="new_hunt_dialog" id="new_hunt_dialog_{{unique|escape}}"
class="hide" />
</li>
</ul>
""" + fileview.AbstractFileTable.layout_template
root_path = "aff4:/hunts"
def __init__(self, **kwargs):
super(HuntTable, self).__init__(**kwargs)
self.AddColumn(semantic.RDFValueColumn(
"Status", renderer=HuntStateIcon, width="40px"))
# The hunt id is the AFF4 URN for the hunt object.
self.AddColumn(semantic.RDFValueColumn(
"Hunt ID", renderer=semantic.SubjectRenderer))
self.AddColumn(semantic.RDFValueColumn("Name"))
self.AddColumn(semantic.RDFValueColumn("Start Time", width="16em"))
self.AddColumn(semantic.RDFValueColumn("Expires", width="16em"))
self.AddColumn(semantic.RDFValueColumn("Client Limit"))
self.AddColumn(semantic.RDFValueColumn("Creator"))
self.AddColumn(semantic.RDFValueColumn("Description", width="100%"))
def Layout(self, request, response):
response = super(HuntTable, self).Layout(request, response)
return self.CallJavascript(response, "HuntTable.Layout")
def BuildTable(self, start_row, end_row, request):
fd = aff4.FACTORY.Open("aff4:/hunts", mode="r", token=request.token)
try:
children = list(fd.ListChildren())
nr_hunts = len(children)
children.sort(key=operator.attrgetter("age"), reverse=True)
children = children[start_row:end_row]
hunt_list = []
for hunt in fd.OpenChildren(children=children):
# Skip hunts that could not be unpickled.
if not isinstance(hunt, hunts.GRRHunt) or not hunt.state:
continue
hunt.create_time = hunt.GetRunner().context.create_time
hunt_list.append(hunt)
hunt_list.sort(key=lambda x: x.create_time, reverse=True)
could_not_display = []
row_index = start_row
for hunt_obj in hunt_list:
if not isinstance(hunt_obj, hunts.GRRHunt):
could_not_display.append((hunt_obj, "Object is not a valid hunt."))
continue
if hunt_obj.state.Empty():
logging.error("Hunt without a valid state found: %s", hunt_obj)
could_not_display.append((hunt_obj,
"Hunt doesn't have a valid state."))
continue
runner = hunt_obj.GetRunner()
description = (runner.args.description or
hunt_obj.__class__.__doc__.split("\n", 1)[0])
self.AddRow({"Hunt ID": hunt_obj.urn,
"Name": hunt_obj.__class__.__name__,
"Status": hunt_obj.Get(hunt_obj.Schema.STATE),
"Start Time": runner.context.start_time,
"Expires": runner.context.expires,
"Client Limit": runner.args.client_limit,
"Creator": runner.context.creator,
"Description": description},
row_index=row_index)
# Hide automated hunts by default
if runner.context.creator == "GRRWorker":
self.SetRowClass(row_index, "robot-hunt hide")
row_index += 1
for hunt_obj, reason in could_not_display:
self.AddRow({"Hunt ID": hunt_obj.urn,
"Description": reason},
row_index=row_index)
row_index += 1
except IOError as e:
logging.error("Bad hunt %s", e)
return nr_hunts >= end_row
class HuntViewTabs(renderers.TabLayout):
"""Show a tabset to inspect the selected hunt.
Listening Javascript Events:
- file_select(aff4_path, age) - A selection event on the hunt table
informing us of a new hunt to show. We redraw the entire bottom right
side using a new renderer.
"""
names = ["Overview", "Log", "Errors", "Graph", "Results", "Stats",
"Crashes", "Outstanding", "Context Detail"]
delegated_renderers = ["HuntOverviewRenderer", "HuntLogRenderer",
"HuntErrorRenderer",
"HuntClientGraphRenderer", "HuntResultsRenderer",
"HuntStatsRenderer", "HuntCrashesRenderer",
"HuntOutstandingRenderer", "HuntContextView"]
empty_template = renderers.Template("""
<div class="padded" id="{{unique|escape}}">
<p>Please select a hunt to see its details here.</p>
</div>
""")
post_parameters = ["hunt_id"]
def Layout(self, request, response):
hunt_id = request.REQ.get("hunt_id")
if hunt_id:
response = super(HuntViewTabs, self).Layout(request, response)
else:
response = super(HuntViewTabs, self).Layout(
request, response, apply_template=self.empty_template)
return self.CallJavascript(response, "HuntViewTabs.Layout")
class ManageHuntsClientView(renderers.Splitter2Way):
"""Manages the clients involved in a hunt."""
description = "Hunt Client View"
top_renderer = "HuntClientTableRenderer"
bottom_renderer = "HuntClientViewTabs"
class ResourceRenderer(semantic.RDFValueRenderer):
"""Renders resource usage as meters."""
cls = "vertical_aligned"
layout_template = renderers.Template(
"<div>"
"<meter value=\"{{this.proxy|escape}}\"></meter>"
"</div>")
class FloatRenderer(semantic.RDFValueRenderer):
layout_template = renderers.Template("{{this.value|escape}}")
def Layout(self, request, response):
if self.proxy is None:
self.value = "0.0"
else:
self.value = "%.2f" % self.proxy
super(FloatRenderer, self).Layout(request, response)
class HuntClientTableRenderer(fileview.AbstractFileTable):
"""Displays the clients."""
selection_publish_queue = "hunt_client_select"
layout_template = """
{{this.title|escape}}
<a id="backlink_{{unique|escape}}" href='#{{this.hash|escape}}'>
back to hunt view</a>
<span class='pull-right'> Filter by State
<select id='{{unique|escape}}_select'>
<option>ALL</option>
<option>OUTSTANDING</option>
<option>COMPLETED</option>
<option>BAD</option>
</select>
</span>
""" + fileview.AbstractFileTable.layout_template
post_parameters = ["hunt_id"]
def __init__(self, **kwargs):
super(HuntClientTableRenderer, self).__init__(**kwargs)
self.AddColumn(semantic.RDFValueColumn(
"Client ID", width="20%", renderer=semantic.SubjectRenderer))
self.AddColumn(semantic.RDFValueColumn("Hostname", width="10%"))
self.AddColumn(semantic.RDFValueColumn("Status", width="10%"))
self.AddColumn(semantic.RDFValueColumn("User CPU seconds", width="10%",
renderer=FloatRenderer))
self.AddColumn(semantic.RDFValueColumn("System CPU seconds", width="10%",
renderer=FloatRenderer))
self.AddColumn(semantic.RDFValueColumn("CPU",
renderer=ResourceRenderer,
width="10%"))
self.AddColumn(semantic.RDFValueColumn("Network bytes sent", width="10%"))
self.AddColumn(semantic.RDFValueColumn("Network",
renderer=ResourceRenderer,
width="10%"))
self.AddColumn(semantic.RDFValueColumn("Last Checkin", width="10%"))
def Layout(self, request, response):
"""Ensure our hunt is in our state for HTML layout."""
hunt_id = request.REQ.get("hunt_id")
self.title = "Viewing Hunt %s" % hunt_id
h = dict(main="ManageHunts", hunt_id=hunt_id)
self.hunt_hash = urllib.urlencode(sorted(h.items()))
response = super(HuntClientTableRenderer, self).Layout(request, response)
return self.CallJavascript(response, "HuntClientTableRenderer.Layout",
hunt_hash=self.hunt_hash)
def BuildTable(self, start_row, end_row, request):
"""Called to fill in the data in the table."""
hunt_id = request.REQ.get("hunt_id")
completion_status_filter = request.REQ.get("completion_status", "ALL")
if hunt_id is None:
return
try:
self.hunt = aff4.FACTORY.Open(hunt_id, token=request.token,
aff4_type="GRRHunt")
except IOError:
logging.error("Invalid hunt %s", hunt_id)
return
# TODO(user): enable per-client resource usage display.
resource_usage = {}
resource_max = [0, 0, 0]
for resource in resource_usage.values():
for i in range(3):
if resource_max[i] < resource[i]:
resource_max[i] = resource[i]
results = {}
for status, client_list in self.hunt.GetClientsByStatus().items():
if (completion_status_filter == "ALL" or
status == completion_status_filter):
for client in client_list:
results[client] = status
# Get the list of clients and sort so that we can page accurately.
client_list = results.keys()
client_list.sort()
client_list = client_list[start_row:end_row]
row_index = start_row
for c_urn, cdict in self.hunt.GetClientStates(client_list):
row = {"Client ID": c_urn,
"Hostname": cdict.get("hostname"),
"Status": results[c_urn],
"Last Checkin": searchclient.FormatLastSeenTime(
cdict.get("age") or 0),
}
client_id = c_urn.Basename()
if client_id in resource_usage:
usage = resource_usage[client_id]
row["User CPU seconds"] = usage[0]
row["System CPU seconds"] = usage[1]
row["Network bytes sent"] = usage[2]
usage_percent = []
for i in range(3):
if resource_max[i]:
usage_percent.append(round(usage[i], 2) / resource_max[i])
else:
usage_percent.append(0.0)
row["CPU"] = usage_percent[0]
row["Network"] = usage_percent[2]
else:
row["User CPU seconds"] = 0
row["System CPU seconds"] = 0
row["Network bytes sent"] = 0
row["CPU"] = 0
row["Network"] = 0
self.AddRow(row, row_index)
row_index += 1
self.size = len(results)
class AbstractLogRenderer(renderers.TemplateRenderer):
"""Render a page for view a Log file.
Implements a very simple view. That will be extended with filtering
capabilities.
Implementations should implement the GetLog function.
"""
show_total_count = False
layout_template = renderers.Template("""
<table class="proto_table">
{% if this.log|length > 0 %}
{% if this.show_total_count %}
<h5>{{this.log|length}} Entries</h5>
{% endif %}
{% endif %}
{% for line in this.log %}
<tr>
{% for val in line %}
<td class="proto_key">{{ val|safe }}</td>
{% endfor %}
</tr>
{% empty %}
    <tr><td>No entries</td></tr>
{% endfor %}
  </table>
""")
def GetLog(self, request):
"""Take a request and return a list of tuples for a log."""
_ = request
return []
def Layout(self, request, response):
"""Fill in the form with the specific fields for the flow requested."""
self.log = []
for row in self.GetLog(request):
rendered_row = []
for item in row:
item_renderer = semantic.FindRendererForObject(item)
rendered_row.append(item_renderer.RawHTML(request))
self.log.append(rendered_row)
return super(AbstractLogRenderer, self).Layout(request, response)
class HuntOverviewRenderer(AbstractLogRenderer):
"""Renders the overview tab."""
# Will be retrieved from request.REQ if not set.
hunt_id = None
layout_template = renderers.Template("""
<a id="ViewHuntDetails_{{unique}}" href='#{{this.hash|escape}}'
onclick='grr.loadFromHash("{{this.hash|escape}}");'
class="btn btn-info">
View hunt details
</a>
<br/>
<dl class="dl-horizontal dl-hunt">
<dt>Name</dt><dd>{{ this.hunt_name|escape }}</dd>
<dt>Hunt ID</dt>
<dd>{{ this.hunt.urn.Basename|escape }}</dd>
<dt>Hunt URN</dt>
<dd>{{ this.hunt.urn|escape }}</dd>
<dt>Creator</dt>
<dd>{{ this.hunt_creator|escape }}</dd>
<dt>Client Limit</dt>
{% if this.client_limit == 0 %}
<dd>None</dd>
{% else %}
<dd>{{ this.client_limit|escape }}</dd>
{% endif %}
<dt>Client Rate (clients/min)</dt>
{% if this.client_rate == 0.0 %}
<dd>No rate limit</dd>
{% else %}
<dd>{{ this.client_rate|escape }}</dd>
{% endif %}
<dt>Clients Scheduled</dt>
<dd>{{ this.all_clients_count|escape }}</dd>
<dt>Outstanding</dt>
<dd>{{ this.outstanding_clients_count|escape }}</dd>
<dt>Completed</dt>
<dd>{{ this.completed_clients_count|escape }}</dd>
<dt>Total CPU seconds used</dt>
<dd>{{ this.cpu_sum|escape }}</dd>
<dt>Total network traffic</dt>
<dd>{{ this.net_sum|filesizeformat }}</dd>
<dt>Regex Rules</dt>
<dd>{{ this.regex_rules|safe }}</dd>
<dt>Integer Rules</dt>
<dd>{{ this.integer_rules|safe }}</dd>
<dt>Arguments</dt><dd>{{ this.args_str|safe }}</dd>
{% for key, val in this.data.items %}
<dt>{{ key|escape }}</dt><dd>{{ val|escape }}</dd>
{% endfor %}
</dl>
""")
error_template = renderers.Template(
"No information available for this Hunt.")
ajax_template = renderers.Template("""
<div id="RunHuntResult_{{unique|escape}}"></div>
""")
def RenderAjax(self, request, response):
self.hunt_id = request.REQ.get("hunt_id")
self.subject = rdfvalue.RDFURN(self.hunt_id)
response = renderers.TemplateRenderer.Layout(
self, request, response, apply_template=self.ajax_template)
return self.CallJavascript(response, "HuntOverviewRenderer.RenderAjax",
subject=self.subject, hunt_id=self.hunt_id)
def Layout(self, request, response):
"""Display the overview."""
if not self.hunt_id:
self.hunt_id = request.REQ.get("hunt_id")
h = dict(main="ManageHuntsClientView", hunt_id=self.hunt_id)
self.hash = urllib.urlencode(sorted(h.items()))
self.data = {}
self.args_str = ""
if self.hunt_id:
try:
self.hunt = aff4.FACTORY.Open(self.hunt_id, aff4_type="GRRHunt",
token=request.token)
if self.hunt.state.Empty():
raise IOError("No valid state could be found.")
hunt_stats = self.hunt.state.context.usage_stats
self.cpu_sum = "%.2f" % hunt_stats.user_cpu_stats.sum
self.net_sum = hunt_stats.network_bytes_sent_stats.sum
(self.all_clients_count,
self.completed_clients_count, _) = self.hunt.GetClientsCounts()
self.outstanding_clients_count = (self.all_clients_count -
self.completed_clients_count)
runner = self.hunt.GetRunner()
self.hunt_name = runner.args.hunt_name
self.hunt_creator = runner.context.creator
self.data = py_collections.OrderedDict()
self.data["Start Time"] = runner.context.start_time
self.data["Expiry Time"] = runner.context.expires
self.data["Status"] = self.hunt.Get(self.hunt.Schema.STATE)
self.client_limit = runner.args.client_limit
self.client_rate = runner.args.client_rate
self.args_str = renderers.DictRenderer(
self.hunt.state, filter_keys=["context"]).RawHTML(request)
if runner.args.regex_rules:
self.regex_rules = foreman.RegexRuleArray(
runner.args.regex_rules).RawHTML(request)
else:
self.regex_rules = "None"
if runner.args.integer_rules:
self.integer_rules = foreman.IntegerRuleArray(
runner.args.integer_rules).RawHTML(request)
else:
self.integer_rules = "None"
except IOError:
self.layout_template = self.error_template
return super(AbstractLogRenderer, self).Layout(request, response)
class HuntContextView(renderers.TemplateRenderer):
"""Render a the hunt context."""
layout_template = renderers.Template("""
{{this.args_str|safe}}
""")
def Layout(self, request, response):
"""Display hunt's context presented as dict."""
if not hasattr(self, "hunt_id"):
self.hunt_id = request.REQ.get("hunt_id")
self.hunt = aff4.FACTORY.Open(self.hunt_id, aff4_type="GRRHunt",
token=request.token)
if self.hunt.state.Empty():
raise IOError("No valid state could be found.")
self.args_str = renderers.DictRenderer(
self.hunt.state.context).RawHTML(request)
return super(HuntContextView, self).Layout(request, response)
class HuntLogRenderer(renderers.AngularDirectiveRenderer):
directive = "grr-hunt-log"
def Layout(self, request, response):
self.directive_args = {}
self.directive_args["hunt-urn"] = request.REQ.get("hunt_id")
return super(HuntLogRenderer, self).Layout(request, response)
class HuntErrorRenderer(renderers.AngularDirectiveRenderer):
directive = "grr-hunt-errors"
def Layout(self, request, response):
self.directive_args = {}
self.directive_args["hunt-urn"] = request.REQ.get("hunt_id")
return super(HuntErrorRenderer, self).Layout(request, response)
class HuntClientViewTabs(renderers.TabLayout):
"""Show a tabset to inspect the selected client of the selected hunt."""
names = ["Status", "Hunt Log", "Hunt Errors", "Client Detail"]
delegated_renderers = ["HuntClientOverviewRenderer", "HuntLogRenderer",
"HuntErrorRenderer", "HuntHostInformationRenderer"]
post_parameters = ["hunt_id", "hunt_client"]
def Layout(self, request, response):
response = super(HuntClientViewTabs, self).Layout(request, response)
return self.CallJavascript(response, "HuntClientViewTabs.Layout",
hunt_id=self.state["hunt_id"])
class HuntClientOverviewRenderer(renderers.TemplateRenderer):
"""Renders the Client Hunt Overview tab."""
layout_template = renderers.Template("""
<a href='#{{this.hash|escape}}' onclick='grr.loadFromHash(
"{{this.hash|escape}}");' ">
Go to client {{ this.client.urn|escape }}
</a>
<table class="proto_table">
<tr><td class="proto_key">Last Checkin</td>
<td>{{ this.last_checkin|escape }}</td>
</table>
""")
def Layout(self, request, response):
"""Display the overview."""
hunt_id = request.REQ.get("hunt_id")
hunt_client = request.REQ.get("hunt_client")
if hunt_id is not None and hunt_client is not None:
try:
self.client = aff4.FACTORY.Open(hunt_client, token=request.token,
aff4_type="VFSGRRClient")
self.last_checkin = rdfvalue.RDFDatetime(
self.client.Get(self.client.Schema.PING))
h = dict(main="HostInformation", c=self.client.client_id)
self.hash = urllib.urlencode(sorted(h.items()))
except IOError as e:
logging.error("Attempt to open client %s. Err %s", hunt_client, e)
return super(HuntClientOverviewRenderer, self).Layout(request, response)
class HuntClientGraphRenderer(renderers.TemplateRenderer):
"""Renders the button to download a hunt graph."""
layout_template = renderers.Template("""
{% if this.clients %}
<button id="{{ unique|escape }}">
Generate
</button>
{% else %}
No data to graph yet.
{% endif %}
""")
def Layout(self, request, response):
self.hunt_id = request.REQ.get("hunt_id")
hunt = aff4.FACTORY.Open(self.hunt_id, token=request.token)
all_count, _, _ = hunt.GetClientsCounts()
self.clients = bool(all_count)
response = super(HuntClientGraphRenderer, self).Layout(request, response)
return self.CallJavascript(response, "HuntClientGraphRenderer.Layout",
hunt_id=self.hunt_id)
class HuntClientCompletionGraphRenderer(renderers.ImageDownloadRenderer):
def Content(self, request, _):
"""Generates the actual image to display."""
hunt_id = request.REQ.get("hunt_id")
hunt = aff4.FACTORY.Open(hunt_id, aff4_type="GRRHunt", token=request.token)
clients_by_status = hunt.GetClientsByStatus()
cl = clients_by_status["STARTED"]
fi = clients_by_status["COMPLETED"]
cdict = {}
for c in cl:
cdict.setdefault(c, []).append(c.age)
fdict = {}
for c in fi:
fdict.setdefault(c, []).append(c.age)
cl_age = [int(min(x)/1e6) for x in cdict.values()]
fi_age = [int(min(x)/1e6) for x in fdict.values()]
cl_hist = {}
fi_hist = {}
for age in cl_age:
cl_hist.setdefault(age, 0)
cl_hist[age] += 1
for age in fi_age:
fi_hist.setdefault(age, 0)
fi_hist[age] += 1
t0 = min(cl_age) - 1
times = [t0]
cl = [0]
fi = [0]
all_times = set(cl_age) | set(fi_age)
cl_count = 0
fi_count = 0
for time in sorted(all_times):
# Check if there is a datapoint one second earlier, add one if not.
if times[-1] != time-1:
times.append(time)
cl.append(cl_count)
fi.append(fi_count)
cl_count += cl_hist.get(time, 0)
fi_count += fi_hist.get(time, 0)
times.append(time)
cl.append(cl_count)
fi.append(fi_count)
# Convert to hours, starting from 0.
times = [(t-t0)/3600.0 for t in times]
params = {"backend": "png"}
plot_lib.plt.rcParams.update(params)
plot_lib.plt.figure(1)
plot_lib.plt.clf()
plot_lib.plt.plot(times, cl, label="Agents issued.")
plot_lib.plt.plot(times, fi, label="Agents completed.")
plot_lib.plt.title("Agent Coverage")
plot_lib.plt.xlabel("Time (h)")
plot_lib.plt.ylabel(r"Agents")
plot_lib.plt.grid(True)
plot_lib.plt.legend(loc=4)
buf = StringIO.StringIO()
plot_lib.plt.savefig(buf)
buf.seek(0)
return buf.read()
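# Illustrative sketch (not part of GRR): the cumulative step-curve built in
# Content() above, reduced to a standalone helper. `ages` is a hypothetical
# list of per-client event timestamps in seconds.
#
#   def cumulative_curve(ages):
#     times, counts, total = [], [], 0
#     for age in sorted(ages):
#       total += 1
#       times.append(age)
#       counts.append(total)
#     return times, counts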
class HuntHostInformationRenderer(fileview.AFF4Stats):
"""Modified HostInformation that reads from hunt_client variable."""
description = "Hunt Client Host Information"
css_class = "TableBody"
attributes_to_show = ["USERNAMES", "HOSTNAME", "MAC_ADDRESS", "INSTALL_DATE",
"SYSTEM", "CLOCK", "CLIENT_INFO"]
def Layout(self, request, response):
"""Produce a summary of the client information."""
client_id = request.REQ.get("hunt_client")
if client_id:
super(HuntHostInformationRenderer, self).Layout(
request, response, client_id=client_id,
aff4_path=rdfvalue.ClientURN(client_id),
age=aff4.ALL_TIMES)
class OutputPluginNoteRenderer(renderers.TemplateRenderer):
"""Baseclass for renderers who render output-plugin-specific notes."""
# Name of the output plugin class that this class should deal with.
for_output_plugin = None
def __init__(self, plugin_def=None, plugin_state=None, **kwargs):
super(OutputPluginNoteRenderer, self).__init__(**kwargs)
if plugin_def is None:
raise ValueError("plugin_def can't be None")
if plugin_state is None:
raise ValueError("plugin_state can't be None")
self.plugin_def = plugin_def
self.plugin_state = plugin_state
class CSVOutputPluginNoteRenderer(OutputPluginNoteRenderer):
"""Note renderer for CSV output plugin."""
for_output_plugin = "CSVOutputPlugin"
layout_template = renderers.Template("""
{% if this.output_urns %}
<div id="{{unique|escape}}" class="well well-small csv-output-note">
<p>CSV output plugin writes to following files
(last update on {{this.plugin_state.last_updated|escape}}):<br/>
{% for output_urn in this.output_urns %}
<a href="#" aff4_path="{{output_urn}}">{{output_urn|escape}}</a><br/>
{% endfor %}
</p>
</div>
{% endif %}
""")
def Layout(self, request, response):
self.output_urns = []
for output_file in self.plugin_state.files_by_type.values():
self.output_urns.append(output_file.urn)
response = super(CSVOutputPluginNoteRenderer, self).Layout(request,
response)
return self.CallJavascript(response, "CSVOutputPluginNoteRenderer.Layout")
class HuntResultsRenderer(semantic.RDFValueCollectionRenderer):
"""Displays a collection of hunt's results."""
layout_template = renderers.Template("""
{% for output_plugin_note in this.output_plugins_notes %}
{{output_plugin_note|safe}}
{% endfor %}
{% if this.exportable_results %}
<div id="generate_archive_{{unique|escape}}" class="well well-small">
<div class="export_tar pull-left">
Results of this hunt can be downloaded as an archive:
<div class="btn-group">
<button name="generate_tar" class="btn btn-default DownloadButton">
Generate TAR.GZ
</button>
<button class="btn btn-default dropdown-toggle" data-toggle="dropdown">
<span class="caret"></span>
</button>
<ul class="dropdown-menu">
<li><a name="generate_zip" href="#">Generate ZIP</a></li>
</ul>
</div>
</div>
<div class="export_zip pull-left">
Results of this hunt can be downloaded as an archive:
<div class="btn-group">
<button class="btn btn-default DownloadButton" name="generate_zip">
Generate ZIP
</button>
<button class="btn btn-default dropdown-toggle" data-toggle="dropdown">
<span class="caret"></span>
</button>
<ul class="dropdown-menu">
<li><a name="generate_tar" href="#">Generate TAR.GZ</a></li>
</ul>
</div>
</div>
<div class="pull-right">
<em>NOTE: generated archive will contain <strong>symlinks</strong>.<br/>
Unsure whether your archive utility supports them?<br/>
Just unpack the archive before browsing its contents.</em>
</div>
<div class="clearfix"></div>
</div>
<div id='generate_action_{{unique|escape}}'></div>
{% endif %}
""") + semantic.RDFValueCollectionRenderer.layout_template
error_template = renderers.Template("""
<p>This hunt hasn't stored any results yet.</p>
""")
context_help_url = "user_manual.html#_exporting_a_collection"
def Layout(self, request, response):
"""Layout the hunt results."""
hunt_id = rdfvalue.RDFURN(request.REQ.get("hunt_id"))
hunt = aff4.FACTORY.Open(hunt_id, token=request.token)
metadata_urn = hunt.urn.Add("ResultsMetadata")
metadata = aff4.FACTORY.Create(
metadata_urn, aff4_type="HuntResultsMetadata", mode="r",
token=request.token)
output_plugins = metadata.Get(metadata.Schema.OUTPUT_PLUGINS)
self.output_plugins_notes = []
for _, (plugin_def, plugin_state) in output_plugins.iteritems():
plugin_name = plugin_def.plugin_name
for renderer_class in renderers.Renderer.classes.values():
if getattr(renderer_class, "for_output_plugin", None) == plugin_name:
renderer = renderer_class(plugin_def=plugin_def,
plugin_state=plugin_state)
self.output_plugins_notes.append(renderer.RawHTML(request))
export_view = renderers.CollectionExportView
self.exportable_results = export_view.IsCollectionExportable(
hunt.state.context.results_collection_urn,
token=request.token)
# In this renderer we show hunt results stored in the results collection.
response = super(HuntResultsRenderer, self).Layout(
request, response,
aff4_path=hunt.GetRunner().context.results_collection_urn)
return self.CallJavascript(response, "HuntResultsRenderer.Layout",
exportable_results=self.exportable_results,
hunt_id=hunt_id)
class HuntGenerateResultsArchive(renderers.TemplateRenderer):
layout_template = renderers.Template("""
<div class="alert alert-success">
<em>Generation has started. An email will be sent upon completion.</em>
</div>
""")
def Layout(self, request, response):
"""Start the flow to generate zip file."""
hunt_id = rdfvalue.RDFURN(request.REQ.get("hunt_id"))
archive_format = utils.SmartStr(request.REQ.get("format"))
if (archive_format not in
rdfvalue.ExportHuntResultsFilesAsArchiveArgs.ArchiveFormat.enum_dict):
raise ValueError("Invalid format: %s.", format)
urn = flow.GRRFlow.StartFlow(flow_name="ExportHuntResultFilesAsArchive",
hunt_urn=hunt_id, format=archive_format,
token=request.token)
logging.info("Generating %s results for %s with flow %s.", format,
hunt_id, urn)
return super(HuntGenerateResultsArchive, self).Layout(request, response)
class HuntStatsRenderer(renderers.TemplateRenderer):
"""Display hunt's resources usage stats."""
layout_template = renderers.Template("""
<h3>Total number of clients: {{this.stats.user_cpu_stats.num|escape}}</h3>
<h3>User CPU</h3>
<dl class="dl-horizontal">
<dt>User CPU mean</dt>
<dd>{{this.stats.user_cpu_stats.mean|floatformat}}</dd>
<dt>User CPU stdev</dt>
<dd>{{this.stats.user_cpu_stats.std|floatformat}}</dd>
<dt>Clients Histogram</dt>
<dd class="histogram">
<div id="user_cpu_{{unique|escape}}"></div>
</dd>
</dl>
<h3>System CPU</h3>
<dl class="dl-horizontal">
<dt>System CPU mean</dt>
<dd>{{this.stats.system_cpu_stats.mean|floatformat}}</dd>
<dt>System CPU stdev</dt>
<dd>{{this.stats.system_cpu_stats.std|floatformat}}</dd>
<dt>Clients Histogram</dt>
<dd class="histogram">
<div id="system_cpu_{{unique|escape}}"></div>
</dd>
</dl>
<h3>Network bytes sent</h3>
<dl class="dl-horizontal">
<dt>Network bytes sent mean</dt>
<dd>{{this.stats.network_bytes_sent_stats.mean|floatformat}}</dd>
<dt>Network bytes sent stdev</dt>
<dd>{{this.stats.network_bytes_sent_stats.std|floatformat}}</dd>
  <dt>Clients Histogram</dt>
<dd class="histogram">
<div id="network_bytes_sent_{{unique|escape}}"></div>
</dd>
</dl>
<h3>Worst performers</h3>
<div class="row">
<div class="col-md-8">
<table id="performers_{{unique|escape}}"
class="table table-condensed table-striped table-bordered">
<thead>
<th>Client Id</th>
<th>User CPU</th>
<th>System CPU</th>
<th>Network bytes sent</th>
</thead>
<tbody>
{% for r in this.stats.worst_performers %}
<tr>
<td>{{r.client_html|safe}}</td>
<td>{{r.cpu_usage.user_cpu_time|floatformat}}</td>
<td>{{r.cpu_usage.system_cpu_time|floatformat}}</td>
<td>{{r.network_bytes_sent|escape}}</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
""")
error_template = renderers.Template(
"No information available for this Hunt.")
def _HistogramToJSON(self, histogram):
hist_data = [(b.range_max_value, b.num) for b in histogram.bins]
return renderers.JsonDumpForScriptContext(hist_data)
def Layout(self, request, response):
"""Layout the HuntStatsRenderer data."""
hunt_id = request.REQ.get("hunt_id")
if hunt_id:
try:
hunt = aff4.FACTORY.Open(hunt_id,
aff4_type="GRRHunt",
token=request.token)
if hunt.state.Empty():
raise IOError("No valid state could be found.")
self.stats = hunt.state.context.usage_stats
for item in self.stats.worst_performers:
renderer = semantic.FindRendererForObject(item.client_id)
item.client_html = renderer.RawHTML()
self.user_cpu_json_data = self._HistogramToJSON(
self.stats.user_cpu_stats.histogram)
        self.system_cpu_json_data = self._HistogramToJSON(
            self.stats.system_cpu_stats.histogram)
self.network_bytes_sent_json_data = self._HistogramToJSON(
self.stats.network_bytes_sent_stats.histogram)
response = super(HuntStatsRenderer, self).Layout(request, response)
return self.CallJavascript(
response, "HuntStatsRenderer.Layout",
user_cpu_json_data=self.user_cpu_json_data,
system_cpu_json_data=self.system_cpu_json_data,
network_bytes_sent_json_data=self.network_bytes_sent_json_data)
except IOError:
self.layout_template = self.error_template
return super(HuntStatsRenderer, self).Layout(request, response)
class HuntCrashesRenderer(crash_view.ClientCrashCollectionRenderer):
"""View launched flows in a tree."""
def Layout(self, request, response):
hunt_id = request.REQ.get("hunt_id")
self.crashes_urn = rdfvalue.RDFURN(hunt_id).Add("crashes")
super(HuntCrashesRenderer, self).Layout(request, response)
class HuntOutstandingRenderer(renderers.TableRenderer):
"""A renderer that shows debug information for outstanding clients."""
post_parameters = ["hunt_id"]
def __init__(self, **kwargs):
super(HuntOutstandingRenderer, self).__init__(**kwargs)
self.AddColumn(semantic.RDFValueColumn("Client"))
self.AddColumn(semantic.RDFValueColumn("Flow"))
self.AddColumn(semantic.RDFValueColumn("Incomplete Request #"))
self.AddColumn(semantic.RDFValueColumn("State"))
self.AddColumn(semantic.RDFValueColumn("Args Expected"))
self.AddColumn(semantic.RDFValueColumn("Available Responses"))
self.AddColumn(semantic.RDFValueColumn("Status"))
self.AddColumn(semantic.RDFValueColumn("Expected Responses"))
self.AddColumn(semantic.RDFValueColumn("Client Requests Pending"))
def GetClientRequests(self, client_urns, token):
"""Returns all client requests for the given client urns."""
task_urns = [urn.Add("tasks") for urn in client_urns]
client_requests_raw = data_store.DB.MultiResolveRegex(task_urns, "task:.*",
token=token)
client_requests = {}
for client_urn, requests in client_requests_raw:
      client_id = str(client_urn)[6:6+18]  # drop the "aff4:/" prefix, keep "C." + 16 hex chars
client_requests.setdefault(client_id, [])
for _, serialized, _ in requests:
client_requests[client_id].append(rdfvalue.GrrMessage(serialized))
return client_requests
def GetAllSubflows(self, hunt_urn, client_urns, token):
"""Lists all subflows for a given hunt for all clients in client_urns."""
client_ids = [urn.Split()[0] for urn in client_urns]
client_bases = [hunt_urn.Add(client_id) for client_id in client_ids]
all_flows = []
act_flows = client_bases
while act_flows:
next_flows = []
for _, children in aff4.FACTORY.MultiListChildren(act_flows, token=token):
for flow_urn in children:
next_flows.append(flow_urn)
all_flows.extend(next_flows)
act_flows = next_flows
return all_flows
def GetFlowRequests(self, flow_urns, token):
"""Returns all outstanding requests for the flows in flow_urns."""
flow_requests = {}
flow_request_urns = [flow_urn.Add("state") for flow_urn in flow_urns]
for flow_urn, values in data_store.DB.MultiResolveRegex(
flow_request_urns, "flow:.*", token=token):
for subject, serialized, _ in values:
try:
if "status" in subject:
msg = rdfvalue.GrrMessage(serialized)
else:
msg = rdfvalue.RequestState(serialized)
except Exception as e: # pylint: disable=broad-except
logging.warn("Error while parsing: %s", e)
continue
flow_requests.setdefault(flow_urn, []).append(msg)
return flow_requests
def BuildTable(self, start_row, end_row, request):
"""Renders the table."""
hunt_id = request.REQ.get("hunt_id")
token = request.token
if hunt_id is None:
return
hunt_id = rdfvalue.RDFURN(hunt_id)
hunt = aff4.FACTORY.Open(hunt_id, aff4_type="GRRHunt", age=aff4.ALL_TIMES,
token=token)
clients_by_status = hunt.GetClientsByStatus()
outstanding = clients_by_status["OUTSTANDING"]
self.size = len(outstanding)
outstanding = sorted(outstanding)[start_row:end_row]
all_flow_urns = self.GetAllSubflows(hunt_id, outstanding, token)
flow_requests = self.GetFlowRequests(all_flow_urns, token)
try:
client_requests = self.GetClientRequests(outstanding, token)
except access_control.UnauthorizedAccess:
client_requests = None
waitingfor = {}
status_by_request = {}
for flow_urn in flow_requests:
for obj in flow_requests[flow_urn]:
if isinstance(obj, rdfvalue.RequestState):
waitingfor.setdefault(flow_urn, obj)
if waitingfor[flow_urn].id > obj.id:
waitingfor[flow_urn] = obj
elif isinstance(obj, rdfvalue.GrrMessage):
status_by_request.setdefault(flow_urn, {})[obj.request_id] = obj
response_urns = []
for request_base_urn, request in waitingfor.iteritems():
response_urns.append(rdfvalue.RDFURN(request_base_urn).Add(
"request:%08X" % request.id))
response_dict = dict(data_store.DB.MultiResolveRegex(
response_urns, "flow:.*", token=token))
row_index = start_row
for flow_urn in sorted(all_flow_urns):
request_urn = flow_urn.Add("state")
client_id = flow_urn.Split()[2]
try:
request_obj = waitingfor[request_urn]
response_urn = rdfvalue.RDFURN(request_urn).Add(
"request:%08X" % request_obj.id)
responses_available = len(response_dict.setdefault(response_urn, []))
status_available = "No"
responses_expected = "Unknown"
if request_obj.id in status_by_request.setdefault(request_urn, {}):
status_available = "Yes"
status = status_by_request[request_urn][request_obj.id]
responses_expected = status.response_id
if client_requests is None:
client_requests_available = "Must use raw access."
else:
client_requests_available = 0
for client_req in client_requests.setdefault(client_id, []):
if request_obj.request.session_id == client_req.session_id:
client_requests_available += 1
row_data = {
"Client": client_id,
"Flow": flow_urn,
"Incomplete Request #": request_obj.id,
"State": request_obj.next_state,
"Args Expected": request_obj.request.args_rdf_name,
"Available Responses": responses_available,
"Status": status_available,
"Expected Responses": responses_expected,
"Client Requests Pending": client_requests_available}
except KeyError:
row_data = {
"Client": client_id,
"Flow": flow_urn,
"Incomplete Request #": "No request found"}
self.AddRow(row_data, row_index=row_index)
row_index += 1
| apache-2.0 | 8,859,519,383,900,725,000 | 1,749,845,018,495,533,300 | 32.409025 | 80 | 0.643757 | false |
jblackburne/scikit-learn | sklearn/manifold/setup.py | 24 | 1279 | import os
from os.path import join
import numpy
from numpy.distutils.misc_util import Configuration
from sklearn._build_utils import get_blas_info
def configuration(parent_package="", top_path=None):
config = Configuration("manifold", parent_package, top_path)
libraries = []
if os.name == 'posix':
libraries.append('m')
config.add_extension("_utils",
sources=["_utils.c"],
include_dirs=[numpy.get_include()],
libraries=libraries,
extra_compile_args=["-O3"])
cblas_libs, blas_info = get_blas_info()
eca = blas_info.pop('extra_compile_args', [])
eca.append("-O4")
config.add_extension("_barnes_hut_tsne",
libraries=cblas_libs,
sources=["_barnes_hut_tsne.c"],
include_dirs=[join('..', 'src', 'cblas'),
numpy.get_include(),
blas_info.pop('include_dirs', [])],
extra_compile_args=eca, **blas_info)
config.add_subpackage('tests')
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(**configuration().todict())
| bsd-3-clause | -1,393,273,860,074,834,400 | 5,283,546,842,337,157,000 | 34.527778 | 74 | 0.526974 | false |
JioCloud/tempest | tempest/services/volume/json/snapshots_client.py | 6 | 8121 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import time
from oslo_log import log as logging
from six.moves.urllib import parse as urllib
from tempest_lib import exceptions as lib_exc
from tempest.common import service_client
from tempest import exceptions
LOG = logging.getLogger(__name__)
class BaseSnapshotsClient(service_client.ServiceClient):
"""Base Client class to send CRUD Volume API requests."""
create_resp = 200
def list_snapshots(self, detail=False, params=None):
"""List all the snapshot."""
url = 'snapshots'
if detail:
url += '/detail'
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBodyList(resp, body['snapshots'])
def show_snapshot(self, snapshot_id):
"""Returns the details of a single snapshot."""
url = "snapshots/%s" % str(snapshot_id)
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body['snapshot'])
def create_snapshot(self, volume_id, **kwargs):
"""
Creates a new snapshot.
volume_id(Required): id of the volume.
force: Create a snapshot even if the volume attached (Default=False)
display_name: Optional snapshot Name.
display_description: User friendly snapshot description.
"""
post_body = {'volume_id': volume_id}
post_body.update(kwargs)
post_body = json.dumps({'snapshot': post_body})
resp, body = self.post('snapshots', post_body)
body = json.loads(body)
self.expected_success(self.create_resp, resp.status)
return service_client.ResponseBody(resp, body['snapshot'])
def update_snapshot(self, snapshot_id, **kwargs):
"""Updates a snapshot."""
put_body = json.dumps({'snapshot': kwargs})
resp, body = self.put('snapshots/%s' % snapshot_id, put_body)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body['snapshot'])
# NOTE(afazekas): just for the wait function
def _get_snapshot_status(self, snapshot_id):
body = self.show_snapshot(snapshot_id)
status = body['status']
# NOTE(afazekas): snapshot can reach an "error"
# state in a "normal" lifecycle
if (status == 'error'):
raise exceptions.SnapshotBuildErrorException(
snapshot_id=snapshot_id)
return status
    # NOTE(afazekas): Wait reinvented again. It is not in the correct layer
def wait_for_snapshot_status(self, snapshot_id, status):
"""Waits for a Snapshot to reach a given status."""
start_time = time.time()
old_value = value = self._get_snapshot_status(snapshot_id)
while True:
dtime = time.time() - start_time
time.sleep(self.build_interval)
if value != old_value:
                LOG.info('Value transition from "%s" to "%s" '
                         'in %d second(s).', old_value,
                         value, dtime)
if (value == status):
return value
if dtime > self.build_timeout:
                message = ('Time Limit Exceeded! (%ds) '
                           'while waiting for %s, '
                           'but we got %s.' %
                           (self.build_timeout, status, value))
raise exceptions.TimeoutException(message)
time.sleep(self.build_interval)
old_value = value
value = self._get_snapshot_status(snapshot_id)
def delete_snapshot(self, snapshot_id):
"""Delete Snapshot."""
resp, body = self.delete("snapshots/%s" % str(snapshot_id))
self.expected_success(202, resp.status)
return service_client.ResponseBody(resp, body)
def is_resource_deleted(self, id):
try:
self.show_snapshot(id)
except lib_exc.NotFound:
return True
return False
@property
def resource_type(self):
"""Returns the primary type of resource this client works with."""
return 'volume-snapshot'
def reset_snapshot_status(self, snapshot_id, status):
"""Reset the specified snapshot's status."""
post_body = json.dumps({'os-reset_status': {"status": status}})
resp, body = self.post('snapshots/%s/action' % snapshot_id, post_body)
self.expected_success(202, resp.status)
return service_client.ResponseBody(resp, body)
def update_snapshot_status(self, snapshot_id, status, progress):
"""Update the specified snapshot's status."""
post_body = {
'status': status,
'progress': progress
}
post_body = json.dumps({'os-update_snapshot_status': post_body})
url = 'snapshots/%s/action' % str(snapshot_id)
resp, body = self.post(url, post_body)
self.expected_success(202, resp.status)
return service_client.ResponseBody(resp, body)
def create_snapshot_metadata(self, snapshot_id, metadata):
"""Create metadata for the snapshot."""
put_body = json.dumps({'metadata': metadata})
url = "snapshots/%s/metadata" % str(snapshot_id)
resp, body = self.post(url, put_body)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body['metadata'])
def show_snapshot_metadata(self, snapshot_id):
"""Get metadata of the snapshot."""
url = "snapshots/%s/metadata" % str(snapshot_id)
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body['metadata'])
def update_snapshot_metadata(self, snapshot_id, metadata):
"""Update metadata for the snapshot."""
put_body = json.dumps({'metadata': metadata})
url = "snapshots/%s/metadata" % str(snapshot_id)
resp, body = self.put(url, put_body)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body['metadata'])
def update_snapshot_metadata_item(self, snapshot_id, id, meta_item):
"""Update metadata item for the snapshot."""
put_body = json.dumps({'meta': meta_item})
url = "snapshots/%s/metadata/%s" % (str(snapshot_id), str(id))
resp, body = self.put(url, put_body)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body['meta'])
def delete_snapshot_metadata_item(self, snapshot_id, id):
"""Delete metadata item for the snapshot."""
url = "snapshots/%s/metadata/%s" % (str(snapshot_id), str(id))
resp, body = self.delete(url)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body)
def force_delete_snapshot(self, snapshot_id):
"""Force Delete Snapshot."""
post_body = json.dumps({'os-force_delete': {}})
resp, body = self.post('snapshots/%s/action' % snapshot_id, post_body)
self.expected_success(202, resp.status)
return service_client.ResponseBody(resp, body)
class SnapshotsClient(BaseSnapshotsClient):
"""Client class to send CRUD Volume V1 API requests."""
| apache-2.0 | 492,917,614,915,767,300 | 192,799,102,026,527,800 | 39.20297 | 78 | 0.618889 | false |
2013Commons/hue | desktop/core/ext-py/Django-1.4.5/tests/regressiontests/utils/functional.py | 93 | 1084 | from django.utils import unittest
from django.utils.functional import lazy, lazy_property
class FunctionalTestCase(unittest.TestCase):
def test_lazy(self):
t = lazy(lambda: tuple(range(3)), list, tuple)
for a, b in zip(t(), range(3)):
self.assertEqual(a, b)
def test_lazy_base_class(self):
"""Test that lazy also finds base class methods in the proxy object"""
class Base(object):
def base_method(self):
pass
class Klazz(Base):
pass
t = lazy(lambda: Klazz(), Klazz)()
self.assertTrue('base_method' in dir(t))
def test_lazy_property(self):
class A(object):
def _get_do(self):
raise NotImplementedError
def _set_do(self, value):
raise NotImplementedError
do = lazy_property(_get_do, _set_do)
class B(A):
def _get_do(self):
return "DO IT"
self.assertRaises(NotImplementedError, lambda: A().do)
self.assertEqual(B().do, 'DO IT')
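# Illustrative note (added; not part of the original tests): lazy() defers
# evaluation of the wrapped callable until the proxy object is actually used.
# A hypothetical sketch:
#
#     greet = lazy(lambda: 'hello', str)  # nothing evaluated yet
#     proxy = greet()                     # still a lazy proxy
#     proxy.upper()                       # evaluates now, returns 'HELLO'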
| apache-2.0 | 1,380,424,723,014,755,800 | -4,597,852,565,710,527,000 | 26.794872 | 78 | 0.560886 | false |
yongshengwang/hue | build/env/lib/python2.7/site-packages/setuptools/tests/test_test.py | 148 | 2329 | # -*- coding: UTF-8 -*-
from __future__ import unicode_literals
import os
import site
import pytest
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
from . import contexts
SETUP_PY = DALS("""
from setuptools import setup
setup(name='foo',
packages=['name', 'name.space', 'name.space.tests'],
namespace_packages=['name'],
test_suite='name.space.tests.test_suite',
)
""")
NS_INIT = DALS("""
# -*- coding: Latin-1 -*-
# Söme Arbiträry Ünicode to test Distribute Issüé 310
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
""")
TEST_PY = DALS("""
import unittest
class TestTest(unittest.TestCase):
def test_test(self):
print "Foo" # Should fail under Python 3 unless 2to3 is used
test_suite = unittest.makeSuite(TestTest)
""")
@pytest.fixture
def sample_test(tmpdir_cwd):
os.makedirs('name/space/tests')
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY)
# name/__init__.py
with open('name/__init__.py', 'wb') as f:
f.write(NS_INIT.encode('Latin-1'))
# name/space/__init__.py
with open('name/space/__init__.py', 'wt') as f:
f.write('#empty\n')
# name/space/tests/__init__.py
with open('name/space/tests/__init__.py', 'wt') as f:
f.write(TEST_PY)
@pytest.mark.skipif('hasattr(sys, "real_prefix")')
@pytest.mark.usefixtures('user_override')
@pytest.mark.usefixtures('sample_test')
class TestTestTest:
def test_test(self):
params = dict(
name='foo',
packages=['name', 'name.space', 'name.space.tests'],
namespace_packages=['name'],
test_suite='name.space.tests.test_suite',
use_2to3=True,
)
dist = Distribution(params)
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.user = 1
cmd.ensure_finalized()
cmd.install_dir = site.USER_SITE
cmd.user = 1
with contexts.quiet():
# The test runner calls sys.exit
with contexts.suppress_exceptions(SystemExit):
cmd.run()
| apache-2.0 | -4,065,185,370,151,814,700 | -8,282,093,314,023,349,000 | 24.538462 | 72 | 0.586919 | false |
MTDEV-KERNEL/MOTO-KERNEL | scripts/rt-tester/rt-tester.py | 904 | 5366 | #!/usr/bin/env python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"lockbkl" : "9",
"unlockbkl" : "10",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
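# Illustrative note (added; not in the original script): each non-comment
# input line is parsed as "command:opcode:threadid:data", where the command
# is C (issue a command), T (test a status value once) or W (wait for a
# status value). A hypothetical scenario -- make thread 0 SCHED_FIFO, take
# and release lock 0 -- might read:
#
#     C: schedfifo: 0: 80
#     C: locknowait: 0: 0
#     W: locked: 0: 0
#     C: unlock: 0: 0
#     T: unlocked: 0: 0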
# Print usage information
def usage():
print "rt-tester.py <-c -h -q -t> <testfile>"
print " -c display comments after first command"
print " -h help"
print " -q quiet mode"
print " -t test mode (syntax check)"
print " testfile: read test specification from testfile"
print " otherwise from stdin"
return
# Print progress when not in quiet mode
def progress(str):
if not quiet:
print str
# Analyse a status value
def analyse(val, top, arg):
intval = int(val)
if top[0] == "M":
intval = intval / (10 ** int(arg))
intval = intval % 10
argval = top[2]
elif top[0] == "O":
argval = int(cmd_opcodes.get(arg, arg))
else:
argval = int(arg)
# progress("%d %s %d" %(intval, top[1], argval))
if top[1] == "eq" and intval == argval:
return 1
if top[1] == "lt" and intval < argval:
return 1
if top[1] == "gt" and intval > argval:
return 1
return 0
# Parse the commandline
try:
(options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
usage()
sys.exit(1)
# Parse commandline options
for option, value in options:
if option == "-c":
comments = 1
elif option == "-q":
quiet = 1
elif option == "-t":
test = 1
elif option == '-h':
usage()
sys.exit(0)
# Select the input source
if arguments:
try:
fd = open(arguments[0])
except Exception,ex:
sys.stderr.write("File not found %s\n" %(arguments[0]))
sys.exit(1)
else:
fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
linenr = linenr + 1
line = fd.readline()
if not len(line):
break
line = line.strip()
parts = line.split(":")
if not parts or len(parts) < 1:
continue
if len(parts[0]) == 0:
continue
if parts[0].startswith("#"):
if comments > 1:
progress(line)
continue
if comments == 1:
comments = 2
progress(line)
cmd = parts[0].strip().lower()
opc = parts[1].strip().lower()
tid = parts[2].strip()
dat = parts[3].strip()
try:
# Test or wait for a status value
if cmd == "t" or cmd == "w":
testop = test_opcodes[opc]
fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
if test:
print fname
continue
while 1:
query = 1
fsta = open(fname, 'r')
status = fsta.readline().strip()
fsta.close()
stat = status.split(",")
for s in stat:
s = s.strip()
if s.startswith(testop[0]):
                        # Separate status value
val = s[2:].strip()
query = analyse(val, testop, dat)
break
if query or cmd == "t":
break
progress(" " + status)
if not query:
sys.stderr.write("Test failed in line %d\n" %(linenr))
sys.exit(1)
# Issue a command to the tester
elif cmd == "c":
cmdnr = cmd_opcodes[opc]
# Build command string and sys filename
cmdstr = "%s:%s" %(cmdnr, dat)
fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
if test:
print fname
continue
fcmd = open(fname, 'w')
fcmd.write(cmdstr)
fcmd.close()
except Exception,ex:
sys.stderr.write(str(ex))
sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
if not test:
fd.close()
sys.exit(1)
# Normal exit
pass
print "Pass"
sys.exit(0)
| gpl-2.0 | 2,659,460,307,049,908,000 | -2,411,003,800,301,748,000 | 23.171171 | 70 | 0.493291 | false |
p4datasystems/CarnotKE | jyhton/Lib/xml/dom/__init__.py | 112 | 7194 | ########################################################################
#
# File Name: __init__.py
#
#
"""
WWW: http://4suite.org/4DOM e-mail: [email protected]
Copyright (c) 2000 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.org/COPYRIGHT for license and copyright information
"""
class Node:
"""Class giving the nodeType and tree-position constants."""
# DOM implementations may use this as a base class for their own
# Node implementations. If they don't, the constants defined here
# should still be used as the canonical definitions as they match
# the values given in the W3C recommendation. Client code can
# safely refer to these values in all tests of Node.nodeType
# values.
ELEMENT_NODE = 1
ATTRIBUTE_NODE = 2
TEXT_NODE = 3
CDATA_SECTION_NODE = 4
ENTITY_REFERENCE_NODE = 5
ENTITY_NODE = 6
PROCESSING_INSTRUCTION_NODE = 7
COMMENT_NODE = 8
DOCUMENT_NODE = 9
DOCUMENT_TYPE_NODE = 10
DOCUMENT_FRAGMENT_NODE = 11
NOTATION_NODE = 12
# Based on DOM Level 3 (WD 9 April 2002)
TREE_POSITION_PRECEDING = 0x01
TREE_POSITION_FOLLOWING = 0x02
TREE_POSITION_ANCESTOR = 0x04
TREE_POSITION_DESCENDENT = 0x08
TREE_POSITION_EQUIVALENT = 0x10
TREE_POSITION_SAME_NODE = 0x20
TREE_POSITION_DISCONNECTED = 0x00
class UserDataHandler:
"""Class giving the operation constants for UserDataHandler.handle()."""
# Based on DOM Level 3 (WD 9 April 2002)
NODE_CLONED = 1
NODE_IMPORTED = 2
NODE_DELETED = 3
NODE_RENAMED = 4
class DOMError:
"""Class giving constants for error severity."""
# Based on DOM Level 3 (WD 9 April 2002)
SEVERITY_WARNING = 0
SEVERITY_ERROR = 1
SEVERITY_FATAL_ERROR = 2
# DOMException codes
INDEX_SIZE_ERR = 1
DOMSTRING_SIZE_ERR = 2
HIERARCHY_REQUEST_ERR = 3
WRONG_DOCUMENT_ERR = 4
INVALID_CHARACTER_ERR = 5
NO_DATA_ALLOWED_ERR = 6
NO_MODIFICATION_ALLOWED_ERR = 7
NOT_FOUND_ERR = 8
NOT_SUPPORTED_ERR = 9
INUSE_ATTRIBUTE_ERR = 10
# DOM Level 2
INVALID_STATE_ERR = 11
SYNTAX_ERR = 12
INVALID_MODIFICATION_ERR = 13
NAMESPACE_ERR = 14
INVALID_ACCESS_ERR = 15
# DOM Level 3
VALIDATION_ERR = 16
# EventException codes
UNSPECIFIED_EVENT_TYPE_ERR = 0
# Fourthought specific codes
FT_EXCEPTION_BASE = 1000
XML_PARSE_ERR = FT_EXCEPTION_BASE + 1
#RangeException codes
BAD_BOUNDARYPOINTS_ERR = 1
INVALID_NODE_TYPE_ERR = 2
class DOMException(Exception):
def __init__(self, code, msg=''):
self.code = code
self.msg = msg or DOMExceptionStrings[code]
def __str__(self):
return self.msg
class EventException(Exception):
def __init__(self, code, msg=''):
self.code = code
self.msg = msg or EventExceptionStrings[code]
return
def __str__(self):
return self.msg
class RangeException(Exception):
def __init__(self, code, msg):
self.code = code
self.msg = msg or RangeExceptionStrings[code]
Exception.__init__(self, self.msg)
class FtException(Exception):
def __init__(self, code, *args):
self.code = code
self.msg = FtExceptionStrings[code] % args
return
def __str__(self):
return self.msg
class IndexSizeErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, INDEX_SIZE_ERR, msg)
class DomstringSizeErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, DOMSTRING_SIZE_ERR, msg)
# DOMStringSizeErr was accidentally introduced in rev 1.14 of this
# file, and was released as part of PyXML 0.6.4, 0.6.5, 0.6.6, 0.7,
# and 0.7.1. It has never been part of the Python DOM API, although
# it better matches the W3C recommendation. It should remain for
# compatibility, unfortunately.
#
DOMStringSizeErr = DomstringSizeErr
class HierarchyRequestErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, HIERARCHY_REQUEST_ERR, msg)
class WrongDocumentErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, WRONG_DOCUMENT_ERR, msg)
class InvalidCharacterErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, INVALID_CHARACTER_ERR, msg)
class NoDataAllowedErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, NO_DATA_ALLOWED_ERR, msg)
class NoModificationAllowedErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, NO_MODIFICATION_ALLOWED_ERR, msg)
class NotFoundErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, NOT_FOUND_ERR, msg)
class NotSupportedErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, NOT_SUPPORTED_ERR, msg)
class InuseAttributeErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, INUSE_ATTRIBUTE_ERR, msg)
class InvalidStateErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, INVALID_STATE_ERR, msg)
class SyntaxErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, SYNTAX_ERR, msg)
class InvalidModificationErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, INVALID_MODIFICATION_ERR, msg)
class NamespaceErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, NAMESPACE_ERR, msg)
class InvalidAccessErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, INVALID_ACCESS_ERR, msg)
class ValidationErr(DOMException):
def __init__(self, msg=''):
DOMException.__init__(self, VALIDATION_ERR, msg)
class UnspecifiedEventTypeErr(EventException):
def __init__(self, msg=''):
EventException.__init__(self, UNSPECIFIED_EVENT_TYPE_ERR, msg)
class XmlParseErr(FtException):
def __init__(self, msg=''):
FtException.__init__(self, XML_PARSE_ERR, msg)
#Specific Range Exceptions
class BadBoundaryPointsErr(RangeException):
def __init__(self, msg=''):
RangeException.__init__(self, BAD_BOUNDARYPOINTS_ERR, msg)
class InvalidNodeTypeErr(RangeException):
def __init__(self, msg=''):
RangeException.__init__(self, INVALID_NODE_TYPE_ERR, msg)
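# Illustrative usage sketch (added; not part of the original module): DOM
# implementations raise the subclasses above, and callers typically catch the
# DOMException base and dispatch on the numeric code:
#
#     try:
#         raise NotFoundErr("node is not a child of this node")
#     except DOMException, ex:
#         if ex.code == NOT_FOUND_ERR:
#             pass  # handle the missing-node case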
XML_NAMESPACE = "http://www.w3.org/XML/1998/namespace"
XMLNS_NAMESPACE = "http://www.w3.org/2000/xmlns/"
XHTML_NAMESPACE = "http://www.w3.org/1999/xhtml"
EMPTY_NAMESPACE = None
EMPTY_PREFIX = None
import MessageSource
DOMExceptionStrings = MessageSource.__dict__['DOMExceptionStrings']
EventExceptionStrings = MessageSource.__dict__['EventExceptionStrings']
FtExceptionStrings = MessageSource.__dict__['FtExceptionStrings']
RangeExceptionStrings = MessageSource.__dict__['RangeExceptionStrings']
from domreg import getDOMImplementation,registerDOMImplementation
| apache-2.0 | -3,821,443,872,412,914,700 | -2,089,633,706,328,690,700 | 30.008621 | 76 | 0.635252 | false |
analyseuc3m/ANALYSE-v1 | common/test/acceptance/fixtures/xqueue.py | 206 | 1402 | """
Fixture to configure XQueue response.
"""
import requests
import json
from . import XQUEUE_STUB_URL
class XQueueResponseFixtureError(Exception):
"""
Error occurred while configuring the stub XQueue.
"""
pass
class XQueueResponseFixture(object):
"""
Configure the XQueue stub's response to submissions.
"""
def __init__(self, pattern, response_dict):
"""
Configure XQueue stub to POST `response_dict` (a dictionary)
back to the LMS when it receives a submission that contains the string
`pattern`.
Remember that there is one XQueue stub shared by all the tests;
if possible, you should have tests use unique queue names
to avoid conflict between tests running in parallel.
"""
self._pattern = pattern
self._response_dict = response_dict
def install(self):
"""
Configure the stub via HTTP.
"""
url = XQUEUE_STUB_URL + "/set_config"
# Configure the stub to respond to submissions to our queue
payload = {self._pattern: json.dumps(self._response_dict)}
response = requests.put(url, data=payload)
if not response.ok:
raise XQueueResponseFixtureError(
"Could not configure XQueue stub for queue '{1}'. Status code: {2}".format(
self._pattern, self._response_dict))
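# Illustrative usage sketch (added; not part of the original fixture). The
# response dictionary below is a hypothetical grader payload; the real keys
# depend on the grader protocol under test:
#
#     fixture = XQueueResponseFixture(
#         'test-queue', {'correct': True, 'score': 1, 'msg': 'Good job!'})
#     fixture.install()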
| agpl-3.0 | 2,238,362,010,572,588,300 | -6,629,848,927,013,092,000 | 28.208333 | 92 | 0.630528 | false |
falau/pogom | pogom/pgoapi/protos/POGOProtos/Networking/Requests/Messages/DownloadSettingsMessage_pb2.py | 16 | 2402 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Networking/Requests/Messages/DownloadSettingsMessage.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Networking/Requests/Messages/DownloadSettingsMessage.proto',
package='POGOProtos.Networking.Requests.Messages',
syntax='proto3',
serialized_pb=_b('\nEPOGOProtos/Networking/Requests/Messages/DownloadSettingsMessage.proto\x12\'POGOProtos.Networking.Requests.Messages\"\'\n\x17\x44ownloadSettingsMessage\x12\x0c\n\x04hash\x18\x01 \x01(\tb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_DOWNLOADSETTINGSMESSAGE = _descriptor.Descriptor(
name='DownloadSettingsMessage',
full_name='POGOProtos.Networking.Requests.Messages.DownloadSettingsMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='hash', full_name='POGOProtos.Networking.Requests.Messages.DownloadSettingsMessage.hash', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=114,
serialized_end=153,
)
DESCRIPTOR.message_types_by_name['DownloadSettingsMessage'] = _DOWNLOADSETTINGSMESSAGE
DownloadSettingsMessage = _reflection.GeneratedProtocolMessageType('DownloadSettingsMessage', (_message.Message,), dict(
DESCRIPTOR = _DOWNLOADSETTINGSMESSAGE,
__module__ = 'POGOProtos.Networking.Requests.Messages.DownloadSettingsMessage_pb2'
# @@protoc_insertion_point(class_scope:POGOProtos.Networking.Requests.Messages.DownloadSettingsMessage)
))
_sym_db.RegisterMessage(DownloadSettingsMessage)
# @@protoc_insertion_point(module_scope)
| mit | -7,109,779,963,204,615,000 | -6,942,753,385,115,726,000 | 33.811594 | 220 | 0.768526 | false |
HHS/CoECI-CMS-Healthcare-Fraud-Prevention | partnerclient/hfppnetwork/partner/conversion/test/test_entities.py | 3 | 3134 | ##
# Copyright (C) 2013 TopCoder Inc., All Rights Reserved.
##
__author__ = 'Easyhard'
__version__ = '1.0'
import entities
from schema import entity
import unittest
from datetime import date
from exception import ConfigurationException
class TestEntities(unittest.TestCase):
"""Testcases for entities.py"""
def setUp(self):
pass
def test_entity_class(self):
"""Testing Entity class's method"""
testing = entities.Testing()
self.assertEqual(testing.get_field_list(), entity['Testing']['fields'])
self.assertEqual(testing.typeof('f1'), 'string')
self.assertEqual(testing.typeof('f2'), 'date')
self.assertEqual(testing.typeof('f3'), 'int')
self.assertEqual(testing.typeof('f4'), 'decimal')
def test_propertylist(self):
"""Checker if generated all properties."""
for class_name, data in entity.items():
instance = getattr(entities, class_name)()
for name, stype, doc in data['fields']:
a = getattr(instance, name)
self.assertIsNone(a)
self.assertIsNotNone(instance.typeof)
def test_property_setget(self):
"""Checker all properties' getter and setter"""
for class_name, data in entity.items():
instance = getattr(entities, class_name)()
for name, stype, doc in data['fields']:
if stype == 'date':
setattr(instance, name, date.min)
self.assertEqual(getattr(instance, name), date.min)
if stype == 'string':
setattr(instance, name, 'testing')
self.assertEqual(getattr(instance, name), 'testing')
setattr(instance, name, 123)
                    # automatically converted to string
self.assertEqual(getattr(instance, name), '123')
if stype == 'int':
setattr(instance, name, 232)
self.assertEqual(getattr(instance, name), 232)
setattr(instance, name, 12.3)
                    # automatically converted to int
self.assertEqual(getattr(instance, name), 12)
                    # raises an exception if it cannot convert
with self.assertRaises(ValueError):
setattr(instance, name, 'abc')
if stype == 'decimal':
setattr(instance, name, 232)
self.assertEqual(getattr(instance, name), 232.0)
setattr(instance, name, 12.3)
                    # automatically converted to float
self.assertEqual(getattr(instance, name), 12.3)
                    # raises an exception if it cannot convert
with self.assertRaises(ValueError):
setattr(instance, name, 'abc')
def test_exceptions(self):
"""Testing raising of ConfigurationException"""
class Foo(object):
pass
with self.assertRaises(ConfigurationException):
Foo = entities.entity_Class(Foo)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 4,540,512,445,582,259,000 | -8,691,596,140,609,857,000 | 38.670886 | 79 | 0.557116 | false |
chunywang/crosswalk-test-suite | stability/stability-iterative-android-tests/iterative/Pause_Resume_Webapp_Repeatedly.py | 18 | 2868 | #!/usr/bin/env python
# coding=utf-8
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Li, Hao<[email protected]>
import unittest
import os
import sys
import commands
import shutil
import time
import subprocess
from TestApp import *
reload(sys)
sys.setdefaultencoding('utf-8')
SCRIPT_PATH = os.path.realpath(__file__)
ConstPath = os.path.dirname(SCRIPT_PATH)
def setUp():
global device
#device = 'E6OKCY411012'
device = os.environ.get('DEVICE_ID')
if not device:
print 'Get env error\n'
sys.exit(1)
class TestPauseResumeWebappRepeatedly(unittest.TestCase):
def test_pause_resume_webapp_repeatedly(self):
setUp()
testapp = TestApp(device, ConstPath + "/../iterative.apk",
"org.xwalk.iterative", "IterativeActivity")
try:
if not testapp.isInstalled():
testapp.install()
testapp.launch()
# Pause and Resume 50 times
for i in range(50):
time.sleep(2)
                # switch to back
self.assertTrue(testapp.switch())
time.sleep(2)
                # switch to front
self.assertTrue(testapp.switch())
testapp.stop()
except Exception as e:
print "Error: %s" % e
testapp.stop()
self.assertTrue(False)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | 3,253,537,982,301,369,000 | -3,515,241,485,446,802,000 | 35.303797 | 80 | 0.680614 | false |
jostep/tensorflow | tensorflow/contrib/specs/python/params_ops.py | 186 | 3104 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Operators for concise TensorFlow parameter specifications.
This module is used as an environment for evaluating expressions
in the "params" DSL.
Specifications are intended to assign simple numerical
values. Examples:
--params "n=64; d=5" --spec "(Cr(n) | Mp([2, 2])) ** d | Fm"
The random parameter primitives are useful for running large numbers
of experiments with randomly distributed parameters:
--params "n=Li(5,500); d=Ui(1,5)" --spec "(Cr(n) | Mp([2, 2])) ** d | Fm"
Internally, this might be implemented as follows:
params = specs.create_params(FLAGS.params, {})
logging.info(repr(params))
net = specs.create_net(FLAGS.spec, inputs, params)
Note that separating the specifications into parameters and network
creation allows us to log the random parameter values easily.
The implementation of this will change soon in order to support
hyperparameter tuning with steering. Instead of returning a number,
the primitives below will return a class instance that is then
used to generate a random number by the framework.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Lint disabled because these are operators in the DSL, not regular
# Python functions.
# pylint: disable=invalid-name
# pylint: disable=wildcard-import,unused-wildcard-import,redefining-builtin
# pylint: disable=redefined-builtin,g-importing-member,no-member
# make available all math expressions
import math
from math import *
import random
# pylint: enable=wildcard-import,unused-wildcard-import,redefining-builtin
# pylint: enable=redefined-builtin,g-importing-member,no-member
def Uf(lo=0.0, hi=1.0):
"""Uniformly distributed floating number."""
return random.uniform(lo, hi)
def Ui(lo, hi):
"""Uniformly distributed integer, inclusive limits."""
return random.randint(lo, hi)
def Lf(lo, hi):
"""Log-uniform distributed floatint point number."""
return math.exp(random.uniform(math.log(lo), math.log(hi)))
def Li(lo, hi):
"""Log-uniform distributed integer, inclusive limits."""
return int(math.floor(math.exp(random.uniform(math.log(lo),
math.log(hi+1-1e-5)))))
def Nt(mu, sigma, limit=3.0):
"""Normally distributed floating point number with truncation."""
return min(max(random.gauss(mu, sigma), mu-limit*sigma), mu+limit*sigma)
# pylint: enable=invalid-name
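if __name__ == "__main__":
  # Illustrative demo (added; not part of the original module): draw one
  # value from each distribution primitive, as a --params expression would.
  random.seed(0)
  print("Uf(0, 1)    ->", Uf(0.0, 1.0))
  print("Ui(1, 6)    ->", Ui(1, 6))
  print("Lf(1e-4, 1) ->", Lf(1e-4, 1.0))
  print("Li(5, 500)  ->", Li(5, 500))
  print("Nt(0, 1)    ->", Nt(0.0, 1.0))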
| apache-2.0 | -7,094,407,560,042,723 | 7,013,698,937,446,333,000 | 34.678161 | 80 | 0.718428 | false |
MartinDelzant/scikit-learn | sklearn/utils/tests/test_random.py | 230 | 7344 | from __future__ import division
import numpy as np
import scipy.sparse as sp
from scipy.misc import comb as combinations
from numpy.testing import assert_array_almost_equal
from sklearn.utils.random import sample_without_replacement
from sklearn.utils.random import random_choice_csc
from sklearn.utils.testing import (
assert_raises,
assert_equal,
assert_true)
###############################################################################
# test custom sampling without replacement algorithm
###############################################################################
def test_invalid_sample_without_replacement_algorithm():
assert_raises(ValueError, sample_without_replacement, 5, 4, "unknown")
def test_sample_without_replacement_algorithms():
methods = ("auto", "tracking_selection", "reservoir_sampling", "pool")
for m in methods:
def sample_without_replacement_method(n_population, n_samples,
random_state=None):
return sample_without_replacement(n_population, n_samples,
method=m,
random_state=random_state)
check_edge_case_of_sample_int(sample_without_replacement_method)
check_sample_int(sample_without_replacement_method)
check_sample_int_distribution(sample_without_replacement_method)
def check_edge_case_of_sample_int(sample_without_replacement):
    # n_population < n_samples
assert_raises(ValueError, sample_without_replacement, 0, 1)
assert_raises(ValueError, sample_without_replacement, 1, 2)
# n_population == n_samples
assert_equal(sample_without_replacement(0, 0).shape, (0, ))
assert_equal(sample_without_replacement(1, 1).shape, (1, ))
# n_population >= n_samples
assert_equal(sample_without_replacement(5, 0).shape, (0, ))
assert_equal(sample_without_replacement(5, 1).shape, (1, ))
# n_population < 0 or n_samples < 0
assert_raises(ValueError, sample_without_replacement, -1, 5)
assert_raises(ValueError, sample_without_replacement, 5, -1)
def check_sample_int(sample_without_replacement):
# This test is heavily inspired from test_random.py of python-core.
#
# For the entire allowable range of 0 <= k <= N, validate that
# the sample is of the correct length and contains only unique items
n_population = 100
for n_samples in range(n_population + 1):
s = sample_without_replacement(n_population, n_samples)
assert_equal(len(s), n_samples)
unique = np.unique(s)
assert_equal(np.size(unique), n_samples)
assert_true(np.all(unique < n_population))
# test edge case n_population == n_samples == 0
assert_equal(np.size(sample_without_replacement(0, 0)), 0)
def check_sample_int_distribution(sample_without_replacement):
# This test is heavily inspired from test_random.py of python-core.
#
# For the entire allowable range of 0 <= k <= N, validate that
# sample generates all possible permutations
n_population = 10
# a large number of trials prevents false negatives without slowing normal
# case
n_trials = 10000
for n_samples in range(n_population):
        # Counting the number of combinations is not as good as counting
        # the number of permutations. However, it works with sampling
        # algorithms that do not provide a random permutation of the subset
        # of integers.
n_expected = combinations(n_population, n_samples, exact=True)
output = {}
for i in range(n_trials):
output[frozenset(sample_without_replacement(n_population,
n_samples))] = None
if len(output) == n_expected:
break
else:
raise AssertionError(
"number of combinations != number of expected (%s != %s)" %
(len(output), n_expected))
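# Worked example (added for illustration): with n_population = 10 and
# n_samples = 3, check_sample_int_distribution expects C(10, 3) = 120
# distinct frozensets, so the 10000-trial loop has ample room to observe
# every combination.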
def test_random_choice_csc(n_samples=10000, random_state=24):
# Explicit class probabilities
classes = [np.array([0, 1]), np.array([0, 1, 2])]
class_probabilites = [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]
got = random_choice_csc(n_samples, classes, class_probabilites,
random_state)
assert_true(sp.issparse(got))
for k in range(len(classes)):
p = np.bincount(got.getcol(k).toarray().ravel()) / float(n_samples)
assert_array_almost_equal(class_probabilites[k], p, decimal=1)
# Implicit class probabilities
classes = [[0, 1], [1, 2]] # test for array-like support
class_probabilites = [np.array([0.5, 0.5]), np.array([0, 1/2, 1/2])]
got = random_choice_csc(n_samples=n_samples,
classes=classes,
random_state=random_state)
assert_true(sp.issparse(got))
for k in range(len(classes)):
p = np.bincount(got.getcol(k).toarray().ravel()) / float(n_samples)
assert_array_almost_equal(class_probabilites[k], p, decimal=1)
    # Edge case probabilities 1.0 and 0.0
classes = [np.array([0, 1]), np.array([0, 1, 2])]
class_probabilites = [np.array([1.0, 0.0]), np.array([0.0, 1.0, 0.0])]
got = random_choice_csc(n_samples, classes, class_probabilites,
random_state)
assert_true(sp.issparse(got))
for k in range(len(classes)):
p = np.bincount(got.getcol(k).toarray().ravel(),
minlength=len(class_probabilites[k])) / n_samples
assert_array_almost_equal(class_probabilites[k], p, decimal=1)
# One class target data
classes = [[1], [0]] # test for array-like support
class_probabilites = [np.array([0.0, 1.0]), np.array([1.0])]
got = random_choice_csc(n_samples=n_samples,
classes=classes,
random_state=random_state)
assert_true(sp.issparse(got))
for k in range(len(classes)):
p = np.bincount(got.getcol(k).toarray().ravel()) / n_samples
assert_array_almost_equal(class_probabilites[k], p, decimal=1)
def test_random_choice_csc_errors():
# the length of an array in classes and class_probabilites is mismatched
classes = [np.array([0, 1]), np.array([0, 1, 2, 3])]
class_probabilites = [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]
assert_raises(ValueError, random_choice_csc, 4, classes,
class_probabilites, 1)
# the class dtype is not supported
classes = [np.array(["a", "1"]), np.array(["z", "1", "2"])]
class_probabilites = [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]
assert_raises(ValueError, random_choice_csc, 4, classes,
class_probabilites, 1)
# the class dtype is not supported
classes = [np.array([4.2, 0.1]), np.array([0.1, 0.2, 9.4])]
class_probabilites = [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]
assert_raises(ValueError, random_choice_csc, 4, classes,
class_probabilites, 1)
# Given proabilites don't sum to 1
classes = [np.array([0, 1]), np.array([0, 1, 2])]
class_probabilites = [np.array([0.5, 0.6]), np.array([0.6, 0.1, 0.3])]
assert_raises(ValueError, random_choice_csc, 4, classes,
class_probabilites, 1)
| bsd-3-clause | -5,054,244,766,690,883,000 | -3,754,041,254,494,881,300 | 39.351648 | 79 | 0.607843 | false |
ufieeehw/IEEE2017 | ieee2017_tf_broadcaster/nodes/tf_broadcaster.py | 1 | 3127 | #!/usr/bin/env python
'''
TF Broadcaster: Keeps track of the various coordinate frames on the vehicle in
relation to the map and each other.
'''
from __future__ import division
import rospy
import tf
from nav_msgs.msg import Odometry
from std_msgs.msg import Float64
__author__ = "Anthony Olive"
__maintainer__ = "Anthony Olive"
__email__ = "[email protected]"
__copyright__ = "Copyright 2017, UF IEEE"
__license__ = "MIT"
class TFBroadcaster():
def __init__(self):
self.__broadcaster = tf.TransformBroadcaster()
# Subscribe to topics for the positions of dynamic frames
self.__odom_subscriber = rospy.Subscriber("/odom", Odometry, self.__update_odom, queue_size=2)
# Publish the transformations at a frequency specified by the rate parameter
publishing_rate = rospy.get_param("~publishing_rate", 30)
rospy.Timer(rospy.Duration(1.0 / publishing_rate), self.__publish_static, oneshot=False)
def __update_odom(self, msg):
msg = msg.pose.pose
self.__broadcaster.sendTransform((msg.position.x, msg.position.y, 0),
(msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w),
rospy.Time.now(), "odom", "map")
def __publish_static(self, event):
'''
Transformations between coordinate frames that are at fixed
positions relative to each other.
'''
# Transformation from odom to base_link
# The origin of the base_link frame is at the center of the top plate
        # This provides planar coordinates of base_link on the map plus its elevation
self.__broadcaster.sendTransform((0, 0, 0.119),
tf.transformations.quaternion_from_euler(0, 0, 0),
rospy.Time.now(), "base_link", "odom")
# Transformations from base_link to each LiDAR
# All frames share a planar center, but the lidar_fused frame is lower
# Each LiDAR is rotated such that the beam is emitted away from the vehicle
self.__broadcaster.sendTransform((0, 0.125368, -0.0775),
tf.transformations.quaternion_from_euler(0, 3.14159, -1.590796),
rospy.Time.now(), "lidar_left", "base_link")
self.__broadcaster.sendTransform((0.1, 0, -0.0775),
tf.transformations.quaternion_from_euler(3.14159, 0, 0),
rospy.Time.now(), "lidar_front", "base_link")
self.__broadcaster.sendTransform((0, -0.125368, -0.0775),
tf.transformations.quaternion_from_euler(0, 3.14159, 1.570796),
rospy.Time.now(), "lidar_right", "base_link")
self.__broadcaster.sendTransform((-0.1, 0, -0.0775),
tf.transformations.quaternion_from_euler(3.14159, 0, 3.14159),
rospy.Time.now(), "lidar_back", "base_link")
# Transformations from base_link to the fused LiDAR pointcloud
# Both frames share a planar center, but the lidar_fused frame is lower
self.__broadcaster.sendTransform((0,0,-0.0775),
tf.transformations.quaternion_from_euler(0,0,0),
rospy.Time.now(), "lidar_fused", "base_link")
# Transformations from base_link to the IMU
#self.__broadcaster.sendTransform((0,0,0),
# tf.transformations.quaternion_from_euler(0,0,0),
# rospy.Time.now(), "imu", "base_link")
if __name__ == "__main__":
rospy.init_node('tf_broadcaster')
tf_broadcaster = TFBroadcaster()
rospy.spin()
| mit | -2,876,288,266,993,542,700 | -5,747,468,585,393,876,000 | 35.360465 | 96 | 0.707387 | false |
mozillazg/Unidecode | unidecode/x014.py | 252 | 4300 | data = (
'[?]', # 0x00
'e', # 0x01
'aai', # 0x02
'i', # 0x03
'ii', # 0x04
'o', # 0x05
'oo', # 0x06
'oo', # 0x07
'ee', # 0x08
'i', # 0x09
'a', # 0x0a
'aa', # 0x0b
'we', # 0x0c
'we', # 0x0d
'wi', # 0x0e
'wi', # 0x0f
'wii', # 0x10
'wii', # 0x11
'wo', # 0x12
'wo', # 0x13
'woo', # 0x14
'woo', # 0x15
'woo', # 0x16
'wa', # 0x17
'wa', # 0x18
'waa', # 0x19
'waa', # 0x1a
'waa', # 0x1b
'ai', # 0x1c
'w', # 0x1d
'\'', # 0x1e
't', # 0x1f
'k', # 0x20
'sh', # 0x21
's', # 0x22
'n', # 0x23
'w', # 0x24
'n', # 0x25
'[?]', # 0x26
'w', # 0x27
'c', # 0x28
'?', # 0x29
'l', # 0x2a
'en', # 0x2b
'in', # 0x2c
'on', # 0x2d
'an', # 0x2e
'pe', # 0x2f
'paai', # 0x30
'pi', # 0x31
'pii', # 0x32
'po', # 0x33
'poo', # 0x34
'poo', # 0x35
'hee', # 0x36
'hi', # 0x37
'pa', # 0x38
'paa', # 0x39
'pwe', # 0x3a
'pwe', # 0x3b
'pwi', # 0x3c
'pwi', # 0x3d
'pwii', # 0x3e
'pwii', # 0x3f
'pwo', # 0x40
'pwo', # 0x41
'pwoo', # 0x42
'pwoo', # 0x43
'pwa', # 0x44
'pwa', # 0x45
'pwaa', # 0x46
'pwaa', # 0x47
'pwaa', # 0x48
'p', # 0x49
'p', # 0x4a
'h', # 0x4b
'te', # 0x4c
'taai', # 0x4d
'ti', # 0x4e
'tii', # 0x4f
'to', # 0x50
'too', # 0x51
'too', # 0x52
'dee', # 0x53
'di', # 0x54
'ta', # 0x55
'taa', # 0x56
'twe', # 0x57
'twe', # 0x58
'twi', # 0x59
'twi', # 0x5a
'twii', # 0x5b
'twii', # 0x5c
'two', # 0x5d
'two', # 0x5e
'twoo', # 0x5f
'twoo', # 0x60
'twa', # 0x61
'twa', # 0x62
'twaa', # 0x63
'twaa', # 0x64
'twaa', # 0x65
't', # 0x66
'tte', # 0x67
'tti', # 0x68
'tto', # 0x69
'tta', # 0x6a
'ke', # 0x6b
'kaai', # 0x6c
'ki', # 0x6d
'kii', # 0x6e
'ko', # 0x6f
'koo', # 0x70
'koo', # 0x71
'ka', # 0x72
'kaa', # 0x73
'kwe', # 0x74
'kwe', # 0x75
'kwi', # 0x76
'kwi', # 0x77
'kwii', # 0x78
'kwii', # 0x79
'kwo', # 0x7a
'kwo', # 0x7b
'kwoo', # 0x7c
'kwoo', # 0x7d
'kwa', # 0x7e
'kwa', # 0x7f
'kwaa', # 0x80
'kwaa', # 0x81
'kwaa', # 0x82
'k', # 0x83
'kw', # 0x84
'keh', # 0x85
'kih', # 0x86
'koh', # 0x87
'kah', # 0x88
'ce', # 0x89
'caai', # 0x8a
'ci', # 0x8b
'cii', # 0x8c
'co', # 0x8d
'coo', # 0x8e
'coo', # 0x8f
'ca', # 0x90
'caa', # 0x91
'cwe', # 0x92
'cwe', # 0x93
'cwi', # 0x94
'cwi', # 0x95
'cwii', # 0x96
'cwii', # 0x97
'cwo', # 0x98
'cwo', # 0x99
'cwoo', # 0x9a
'cwoo', # 0x9b
'cwa', # 0x9c
'cwa', # 0x9d
'cwaa', # 0x9e
'cwaa', # 0x9f
'cwaa', # 0xa0
'c', # 0xa1
'th', # 0xa2
'me', # 0xa3
'maai', # 0xa4
'mi', # 0xa5
'mii', # 0xa6
'mo', # 0xa7
'moo', # 0xa8
'moo', # 0xa9
'ma', # 0xaa
'maa', # 0xab
'mwe', # 0xac
'mwe', # 0xad
'mwi', # 0xae
'mwi', # 0xaf
'mwii', # 0xb0
'mwii', # 0xb1
'mwo', # 0xb2
'mwo', # 0xb3
'mwoo', # 0xb4
'mwoo', # 0xb5
'mwa', # 0xb6
'mwa', # 0xb7
'mwaa', # 0xb8
'mwaa', # 0xb9
'mwaa', # 0xba
'm', # 0xbb
'm', # 0xbc
'mh', # 0xbd
'm', # 0xbe
'm', # 0xbf
'ne', # 0xc0
'naai', # 0xc1
'ni', # 0xc2
'nii', # 0xc3
'no', # 0xc4
'noo', # 0xc5
'noo', # 0xc6
'na', # 0xc7
'naa', # 0xc8
'nwe', # 0xc9
'nwe', # 0xca
'nwa', # 0xcb
'nwa', # 0xcc
'nwaa', # 0xcd
'nwaa', # 0xce
'nwaa', # 0xcf
'n', # 0xd0
'ng', # 0xd1
'nh', # 0xd2
'le', # 0xd3
'laai', # 0xd4
'li', # 0xd5
'lii', # 0xd6
'lo', # 0xd7
'loo', # 0xd8
'loo', # 0xd9
'la', # 0xda
'laa', # 0xdb
'lwe', # 0xdc
'lwe', # 0xdd
'lwi', # 0xde
'lwi', # 0xdf
'lwii', # 0xe0
'lwii', # 0xe1
'lwo', # 0xe2
'lwo', # 0xe3
'lwoo', # 0xe4
'lwoo', # 0xe5
'lwa', # 0xe6
'lwa', # 0xe7
'lwaa', # 0xe8
'lwaa', # 0xe9
'l', # 0xea
'l', # 0xeb
'l', # 0xec
'se', # 0xed
'saai', # 0xee
'si', # 0xef
'sii', # 0xf0
'so', # 0xf1
'soo', # 0xf2
'soo', # 0xf3
'sa', # 0xf4
'saa', # 0xf5
'swe', # 0xf6
'swe', # 0xf7
'swi', # 0xf8
'swi', # 0xf9
'swii', # 0xfa
'swii', # 0xfb
'swo', # 0xfc
'swo', # 0xfd
'swoo', # 0xfe
'swoo', # 0xff
)
| gpl-2.0 | -1,328,070,727,552,427,500 | 6,387,550,881,977,901,000 | 15.666667 | 17 | 0.40093 | false |
Changaco/oh-mainline | vendor/packages/gdata/tests/atom_tests/mock_client_test.py | 128 | 2421 | #!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = '[email protected] (Jeff Scudder)'
import unittest
import atom.mock_http
import atom.http
class MockHttpClientUnitTest(unittest.TestCase):
def setUp(self):
self.client = atom.mock_http.MockHttpClient()
  def testRespondToGet(self):
mock_response = atom.http_interface.HttpResponse(body='Hooray!',
status=200, reason='OK')
self.client.add_response(mock_response, 'GET',
'http://example.com/hooray')
response = self.client.request('GET', 'http://example.com/hooray')
self.assertEquals(len(self.client.recordings), 1)
self.assertEquals(response.status, 200)
self.assertEquals(response.read(), 'Hooray!')
def testRecordResponse(self):
# Turn on pass-through record mode.
self.client.real_client = atom.http.ProxiedHttpClient()
live_response = self.client.request('GET',
'http://www.google.com/base/feeds/snippets?max-results=1')
live_response_body = live_response.read()
self.assertEquals(live_response.status, 200)
self.assertEquals(live_response_body.startswith('<?xml'), True)
# Requery for the now canned data.
self.client.real_client = None
canned_response = self.client.request('GET',
'http://www.google.com/base/feeds/snippets?max-results=1')
# The canned response should be the stored response.
canned_response_body = canned_response.read()
self.assertEquals(canned_response.status, 200)
self.assertEquals(canned_response_body, live_response_body)
def testUnrecordedRequest(self):
try:
self.client.request('POST', 'http://example.org')
self.fail()
except atom.mock_http.NoRecordingFound:
pass
def suite():
return unittest.TestSuite(
(unittest.makeSuite(MockHttpClientUnitTest,'test'),))
if __name__ == '__main__':
unittest.main()
| agpl-3.0 | -5,439,971,732,730,720,000 | 2,024,169,006,358,076,700 | 30.855263 | 74 | 0.706733 | false |
muzili/repo | main.py | 18 | 6629 | #!/bin/sh
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
magic='--calling-python-from-/bin/sh--'
"""exec" python -E "$0" "$@" """#$magic"
if __name__ == '__main__':
import sys
if sys.argv[-1] == '#%s' % magic:
del sys.argv[-1]
del magic
import optparse
import os
import re
import sys
from trace import SetTrace
from git_config import init_ssh, close_ssh
from command import InteractiveCommand
from command import MirrorSafeCommand
from command import PagedCommand
from error import ManifestInvalidRevisionError
from error import NoSuchProjectError
from error import RepoChangedException
from pager import RunPager
from subcmds import all as all_commands
global_options = optparse.OptionParser(
usage="repo [-p|--paginate|--no-pager] COMMAND [ARGS]"
)
global_options.add_option('-p', '--paginate',
dest='pager', action='store_true',
help='display command output in the pager')
global_options.add_option('--no-pager',
dest='no_pager', action='store_true',
help='disable the pager')
global_options.add_option('--trace',
dest='trace', action='store_true',
help='trace git command execution')
global_options.add_option('--version',
dest='show_version', action='store_true',
help='display this version of repo')
class _Repo(object):
def __init__(self, repodir):
self.repodir = repodir
self.commands = all_commands
# add 'branch' as an alias for 'branches'
all_commands['branch'] = all_commands['branches']
def _Run(self, argv):
name = None
glob = []
for i in xrange(0, len(argv)):
if not argv[i].startswith('-'):
name = argv[i]
if i > 0:
glob = argv[:i]
argv = argv[i + 1:]
break
if not name:
glob = argv
name = 'help'
argv = []
gopts, gargs = global_options.parse_args(glob)
if gopts.trace:
SetTrace()
if gopts.show_version:
if name == 'help':
name = 'version'
else:
print >>sys.stderr, 'fatal: invalid usage of --version'
sys.exit(1)
try:
cmd = self.commands[name]
except KeyError:
print >>sys.stderr,\
"repo: '%s' is not a repo command. See 'repo help'."\
% name
sys.exit(1)
cmd.repodir = self.repodir
if not isinstance(cmd, MirrorSafeCommand) and cmd.manifest.IsMirror:
print >>sys.stderr, \
"fatal: '%s' requires a working directory"\
% name
sys.exit(1)
copts, cargs = cmd.OptionParser.parse_args(argv)
if not gopts.no_pager and not isinstance(cmd, InteractiveCommand):
config = cmd.manifest.globalConfig
if gopts.pager:
use_pager = True
else:
use_pager = config.GetBoolean('pager.%s' % name)
if use_pager is None:
use_pager = cmd.WantPager(copts)
if use_pager:
RunPager(config)
try:
cmd.Execute(copts, cargs)
except ManifestInvalidRevisionError, e:
print >>sys.stderr, 'error: %s' % str(e)
sys.exit(1)
except NoSuchProjectError, e:
if e.name:
print >>sys.stderr, 'error: project %s not found' % e.name
else:
print >>sys.stderr, 'error: no project in current directory'
sys.exit(1)
def _MyWrapperPath():
return os.path.join(os.path.dirname(__file__), 'repo')
def _CurrentWrapperVersion():
VERSION = None
pat = re.compile(r'^VERSION *=')
fd = open(_MyWrapperPath())
for line in fd:
if pat.match(line):
fd.close()
exec line
return VERSION
raise NameError, 'No VERSION in repo script'
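# Illustrative note (added): the wrapper script is expected to carry a line
# such as "VERSION = (1, 7)"; the regex above locates it and "exec line"
# evaluates it, so VERSION becomes a comparable tuple in this scope.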
def _CheckWrapperVersion(ver, repo_path):
if not repo_path:
repo_path = '~/bin/repo'
if not ver:
print >>sys.stderr, 'no --wrapper-version argument'
sys.exit(1)
exp = _CurrentWrapperVersion()
ver = tuple(map(lambda x: int(x), ver.split('.')))
if len(ver) == 1:
ver = (0, ver[0])
if exp[0] > ver[0] or ver < (0, 4):
exp_str = '.'.join(map(lambda x: str(x), exp))
print >>sys.stderr, """
!!! A new repo command (%5s) is available. !!!
!!! You must upgrade before you can continue: !!!
cp %s %s
""" % (exp_str, _MyWrapperPath(), repo_path)
sys.exit(1)
if exp > ver:
exp_str = '.'.join(map(lambda x: str(x), exp))
print >>sys.stderr, """
... A new repo command (%5s) is available.
... You should upgrade soon:
cp %s %s
""" % (exp_str, _MyWrapperPath(), repo_path)
def _CheckRepoDir(dir):
if not dir:
print >>sys.stderr, 'no --repo-dir argument'
sys.exit(1)
def _PruneOptions(argv, opt):
i = 0
while i < len(argv):
a = argv[i]
if a == '--':
break
if a.startswith('--'):
eq = a.find('=')
if eq > 0:
a = a[0:eq]
if not opt.has_option(a):
del argv[i]
continue
i += 1
def _Main(argv):
opt = optparse.OptionParser(usage="repo wrapperinfo -- ...")
opt.add_option("--repo-dir", dest="repodir",
help="path to .repo/")
opt.add_option("--wrapper-version", dest="wrapper_version",
help="version of the wrapper script")
opt.add_option("--wrapper-path", dest="wrapper_path",
help="location of the wrapper script")
_PruneOptions(argv, opt)
opt, argv = opt.parse_args(argv)
_CheckWrapperVersion(opt.wrapper_version, opt.wrapper_path)
_CheckRepoDir(opt.repodir)
repo = _Repo(opt.repodir)
try:
try:
init_ssh()
repo._Run(argv)
finally:
close_ssh()
except KeyboardInterrupt:
sys.exit(1)
except RepoChangedException, rce:
# If repo changed, re-exec ourselves.
#
argv = list(sys.argv)
argv.extend(rce.extra_args)
try:
os.execv(__file__, argv)
except OSError, e:
print >>sys.stderr, 'fatal: cannot restart repo after upgrade'
print >>sys.stderr, 'fatal: %s' % e
sys.exit(128)
if __name__ == '__main__':
_Main(sys.argv[1:])
| apache-2.0 | 7,413,209,845,508,081,000 | 2,085,369,189,725,470,500 | 27.32906 | 74 | 0.599336 | false |
juanshishido/okcupid | utils/permutation.py | 1 | 2439 | import numpy as np
from scipy.stats import ttest_ind
from sklearn.metrics import accuracy_score
def _diff_means(m, arr):
"""Calculate the difference-in-means statistic.
This is based on an input array, `arr`, where the first
`m` observations correspond to a particular class.
Parameters
----------
m : int
Number of samples in the first class
arr : np.ndarray
Data for both classes
Returns
-------
float
"""
return np.mean(arr[:m]) - np.mean(arr[m:])
def _permute(a, b, comparison='predictions', permutations=10000):
"""Estimate of the permutation-based p-value
Parameters
----------
a : np.ndarray
Data for one class or
ground truth (correct) labels
b : np.ndarray
Data for another class or
predicted labels, as returned by a classifier
comparison : str
{'predictions', 'means'}
permutations : int, optional
Number of permutations
Returns
-------
p_value : float
The proportion of times a value as extreme
as the observed estimate is seen
Notes
-----
This calculates the two-tailed p-value
"""
assert comparison in ('predictions', 'means')
np.random.seed(42)
if comparison == 'predictions':
c = b.copy()
compare = accuracy_score
else:
c = np.append(a, b)
a = a.shape[0]
compare = _diff_means
baseline = compare(a, c)
v = []
for _ in range(permutations):
np.random.shuffle(c)
v.append(compare(a, c))
p_value = (np.abs(np.array(v)) >= np.abs(baseline)).sum() / permutations
return p_value
def print_pvalues(a, b):
"""Wrapper function for printing meand and p-values
both permutation-based and classical
Parameters
----------
a : np.ndarray
Data for one class or
ground truth (correct) labels
b : np.ndarray
Data for another class or
predicted labels, as returned by a classifier
Returns
-------
None
"""
assert isinstance(a, np.ndarray) and isinstance(b, np.ndarray)
rnd = lambda x: np.round(x, 8)
permutation = _permute(a, b, 'means')
classical = ttest_ind(a, b, equal_var=False)[1]
print("[means] 'a':", rnd(a.mean()), "'b':", rnd(b.mean()))
print("p-values:")
print(" [permutation]:", rnd(permutation))
print(" [classical]: ", rnd(classical))
| mit | 4,657,792,125,948,165,000 | -5,755,618,526,227,099,000 | 25.802198 | 76 | 0.595326 | false |
geligaoli/shadowsocks-1 | shadowsocks/common.py | 9 | 10391 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import socket
import struct
import logging
import binascii
def compat_ord(s):
if type(s) == int:
return s
return _ord(s)
def compat_chr(d):
if bytes == str:
return _chr(d)
return bytes([d])
_ord = ord
_chr = chr
ord = compat_ord
chr = compat_chr
def to_bytes(s):
if bytes != str:
if type(s) == str:
return s.encode('utf-8')
return s
def to_str(s):
if bytes != str:
if type(s) == bytes:
return s.decode('utf-8')
return s
def inet_ntop(family, ipstr):
if family == socket.AF_INET:
return to_bytes(socket.inet_ntoa(ipstr))
elif family == socket.AF_INET6:
import re
v6addr = ':'.join(('%02X%02X' % (ord(i), ord(j))).lstrip('0')
for i, j in zip(ipstr[::2], ipstr[1::2]))
v6addr = re.sub('::+', '::', v6addr, count=1)
return to_bytes(v6addr)
def inet_pton(family, addr):
addr = to_str(addr)
if family == socket.AF_INET:
return socket.inet_aton(addr)
elif family == socket.AF_INET6:
if '.' in addr: # a v4 addr
v4addr = addr[addr.rindex(':') + 1:]
v4addr = socket.inet_aton(v4addr)
            v4addr = list(map(lambda x: ('%02X' % ord(x)), v4addr))
v4addr.insert(2, ':')
newaddr = addr[:addr.rindex(':') + 1] + ''.join(v4addr)
return inet_pton(family, newaddr)
dbyts = [0] * 8 # 8 groups
grps = addr.split(':')
for i, v in enumerate(grps):
if v:
dbyts[i] = int(v, 16)
else:
for j, w in enumerate(grps[::-1]):
if w:
dbyts[7 - j] = int(w, 16)
else:
break
break
return b''.join((chr(i // 256) + chr(i % 256)) for i in dbyts)
else:
raise RuntimeError("What family?")
def is_ip(address):
for family in (socket.AF_INET, socket.AF_INET6):
try:
if type(address) != str:
address = address.decode('utf8')
inet_pton(family, address)
return family
except (TypeError, ValueError, OSError, IOError):
pass
return False
def patch_socket():
if not hasattr(socket, 'inet_pton'):
socket.inet_pton = inet_pton
if not hasattr(socket, 'inet_ntop'):
socket.inet_ntop = inet_ntop
patch_socket()
ADDRTYPE_IPV4 = 1
ADDRTYPE_IPV6 = 4
ADDRTYPE_HOST = 3
def pack_addr(address):
address_str = to_str(address)
for family in (socket.AF_INET, socket.AF_INET6):
try:
r = socket.inet_pton(family, address_str)
if family == socket.AF_INET6:
return b'\x04' + r
else:
return b'\x01' + r
except (TypeError, ValueError, OSError, IOError):
pass
if len(address) > 255:
address = address[:255] # TODO
return b'\x03' + chr(len(address)) + address
def pre_parse_header(data):
datatype = ord(data[0])
if datatype == 0x80:
if len(data) <= 2:
return None
rand_data_size = ord(data[1])
if rand_data_size + 2 >= len(data):
logging.warn('header too short, maybe wrong password or '
'encryption method')
return None
data = data[rand_data_size + 2:]
elif datatype == 0x81:
data = data[1:]
elif datatype == 0x82:
if len(data) <= 3:
return None
rand_data_size = struct.unpack('>H', data[1:3])[0]
if rand_data_size + 3 >= len(data):
logging.warn('header too short, maybe wrong password or '
'encryption method')
return None
data = data[rand_data_size + 3:]
elif datatype == 0x88:
if len(data) <= 7 + 7:
return None
data_size = struct.unpack('>H', data[1:3])[0]
ogn_data = data
data = data[:data_size]
crc = binascii.crc32(data) & 0xffffffff
if crc != 0xffffffff:
            logging.warn('incorrect CRC32, maybe wrong password or '
'encryption method')
return None
start_pos = 3 + ord(data[3])
data = data[start_pos:-4]
if data_size < len(ogn_data):
data += ogn_data[data_size:]
return data
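# Illustrative summary (added comment) of the obfuscation headers accepted by
# pre_parse_header(), as implemented above:
#   0x80: 1-byte type, 1-byte random-data length, random bytes, then payload
#   0x81: 1-byte type, then the payload unchanged
#   0x82: 1-byte type, 2-byte big-endian random-data length, random bytes,
#         then payload
#   0x88: 1-byte type, 2-byte big-endian total size, 1-byte pad length,
#         padding, payload, and a 4-byte CRC32 trailer (the whole sized
#         block must CRC to 0xffffffff)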
def parse_header(data):
addrtype = ord(data[0])
dest_addr = None
dest_port = None
header_length = 0
connecttype = (addrtype & 0x10) and 1 or 0
addrtype &= ~0x10
if addrtype == ADDRTYPE_IPV4:
if len(data) >= 7:
dest_addr = socket.inet_ntoa(data[1:5])
dest_port = struct.unpack('>H', data[5:7])[0]
header_length = 7
else:
logging.warn('header is too short')
elif addrtype == ADDRTYPE_HOST:
if len(data) > 2:
addrlen = ord(data[1])
if len(data) >= 2 + addrlen:
dest_addr = data[2:2 + addrlen]
dest_port = struct.unpack('>H', data[2 + addrlen:4 +
addrlen])[0]
header_length = 4 + addrlen
else:
logging.warn('header is too short')
else:
logging.warn('header is too short')
elif addrtype == ADDRTYPE_IPV6:
if len(data) >= 19:
dest_addr = socket.inet_ntop(socket.AF_INET6, data[1:17])
dest_port = struct.unpack('>H', data[17:19])[0]
header_length = 19
else:
logging.warn('header is too short')
else:
logging.warn('unsupported addrtype %d, maybe wrong password or '
'encryption method' % addrtype)
if dest_addr is None:
return None
return connecttype, to_bytes(dest_addr), dest_port, header_length
class IPNetwork(object):
ADDRLENGTH = {socket.AF_INET: 32, socket.AF_INET6: 128, False: 0}
def __init__(self, addrs):
self._network_list_v4 = []
self._network_list_v6 = []
if type(addrs) == str:
addrs = addrs.split(',')
list(map(self.add_network, addrs))
def add_network(self, addr):
if addr is "":
return
block = addr.split('/')
addr_family = is_ip(block[0])
addr_len = IPNetwork.ADDRLENGTH[addr_family]
if addr_family is socket.AF_INET:
ip, = struct.unpack("!I", socket.inet_aton(block[0]))
elif addr_family is socket.AF_INET6:
hi, lo = struct.unpack("!QQ", inet_pton(addr_family, block[0]))
ip = (hi << 64) | lo
else:
raise Exception("Not a valid CIDR notation: %s" % addr)
        if len(block) == 1:
            prefix_size = 0
            while (ip & 1) == 0 and ip != 0:
                ip >>= 1
                prefix_size += 1
            logging.warn("You didn't specify CIDR routing prefix size for %s, "
                         "implicitly treated as %s/%d" % (addr, addr, addr_len))
elif block[1].isdigit() and int(block[1]) <= addr_len:
prefix_size = addr_len - int(block[1])
ip >>= prefix_size
else:
raise Exception("Not a valid CIDR notation: %s" % addr)
if addr_family is socket.AF_INET:
self._network_list_v4.append((ip, prefix_size))
else:
self._network_list_v6.append((ip, prefix_size))
def __contains__(self, addr):
addr_family = is_ip(addr)
if addr_family is socket.AF_INET:
ip, = struct.unpack("!I", socket.inet_aton(addr))
return any(map(lambda n_ps: n_ps[0] == ip >> n_ps[1],
self._network_list_v4))
elif addr_family is socket.AF_INET6:
hi, lo = struct.unpack("!QQ", inet_pton(addr_family, addr))
ip = (hi << 64) | lo
return any(map(lambda n_ps: n_ps[0] == ip >> n_ps[1],
self._network_list_v6))
else:
return False
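# Hedged sketch (hypothetical helper, unused): IPNetwork accepts a
# comma-separated rule string and then answers membership questions for
# both address families through the ``in`` operator.
def _demo_ip_network():
    rules = IPNetwork('10.0.0.0/8,fd00::/8')
    assert '10.1.2.3' in rules
    assert '11.0.0.1' not in rules
    assert 'fd00::1' in rules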
def test_inet_conv():
ipv4 = b'8.8.4.4'
b = inet_pton(socket.AF_INET, ipv4)
assert inet_ntop(socket.AF_INET, b) == ipv4
ipv6 = b'2404:6800:4005:805::1011'
b = inet_pton(socket.AF_INET6, ipv6)
assert inet_ntop(socket.AF_INET6, b) == ipv6
def test_parse_header():
assert parse_header(b'\x03\x0ewww.google.com\x00\x50') == \
(3, b'www.google.com', 80, 18)
assert parse_header(b'\x01\x08\x08\x08\x08\x00\x35') == \
(1, b'8.8.8.8', 53, 7)
assert parse_header((b'\x04$\x04h\x00@\x05\x08\x05\x00\x00\x00\x00\x00'
b'\x00\x10\x11\x00\x50')) == \
(4, b'2404:6800:4005:805::1011', 80, 19)
def test_pack_header():
assert pack_addr(b'8.8.8.8') == b'\x01\x08\x08\x08\x08'
assert pack_addr(b'2404:6800:4005:805::1011') == \
b'\x04$\x04h\x00@\x05\x08\x05\x00\x00\x00\x00\x00\x00\x10\x11'
assert pack_addr(b'www.google.com') == b'\x03\x0ewww.google.com'
def test_ip_network():
ip_network = IPNetwork('127.0.0.0/24,::ff:1/112,::1,192.168.1.1,192.0.2.0')
assert '127.0.0.1' in ip_network
assert '127.0.1.1' not in ip_network
    assert '::ff:ffff' in ip_network
assert '::ffff:1' not in ip_network
assert '::1' in ip_network
assert '::2' not in ip_network
assert '192.168.1.1' in ip_network
assert '192.168.1.2' not in ip_network
assert '192.0.2.1' in ip_network
assert '192.0.3.1' in ip_network # 192.0.2.0 is treated as 192.0.2.0/23
assert 'www.google.com' not in ip_network
if __name__ == '__main__':
test_inet_conv()
test_parse_header()
test_pack_header()
test_ip_network()
| apache-2.0 | 3,597,558,416,224,879,000 | 3,683,729,370,473,562,000 | 31.370717 | 79 | 0.540179 | false |
Juniper/nova | nova/tests/functional/wsgi/test_flavor_manage.py | 4 | 9812 | # Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testscenarios
from nova import context
from nova import db
from nova import exception as ex
from nova import objects
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional import integrated_helpers as helper
from nova.tests.unit import policy_fixture
def rand_flavor(**kwargs):
flav = {
'name': 'name-%s' % helper.generate_random_alphanumeric(10),
'id': helper.generate_random_alphanumeric(10),
'ram': int(helper.generate_random_numeric(2)) + 1,
'disk': int(helper.generate_random_numeric(3)),
'vcpus': int(helper.generate_random_numeric(1)) + 1,
}
flav.update(kwargs)
return flav
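# Hedged illustration (hypothetical helper, not used by the tests below):
# rand_flavor() randomizes every required field of the os-flavors-manage
# POST body, and keyword overrides pin individual fields for negative tests.
def _demo_rand_flavor():
    flav = rand_flavor(vcpus=0, ram=64)
    assert flav['vcpus'] == 0 and flav['ram'] == 64
    assert set(flav) == {'name', 'id', 'ram', 'disk', 'vcpus'}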
class FlavorManageFullstack(testscenarios.WithScenarios, test.TestCase):
"""Tests for flavors manage administrative command.
Extension: os-flavors-manage
os-flavors-manage adds a set of admin functions to the flavors
resource for the creation and deletion of flavors.
POST /v2/flavors:
::
{
'name': NAME, # string, required unique
'id': ID, # string, required unique
'ram': RAM, # in MB, required
'vcpus': VCPUS, # int value, required
'disk': DISK, # in GB, required
'OS-FLV-EXT-DATA:ephemeral', # in GB, ephemeral disk size
'is_public': IS_PUBLIC, # boolean
'swap': SWAP, # in GB?
'rxtx_factor': RXTX, # ???
}
Returns Flavor
DELETE /v2/flavors/ID
Functional Test Scope:
This test starts the wsgi stack for the nova api services, uses an
in memory database to ensure the path through the wsgi layer to
the database.
"""
_additional_fixtures = []
scenarios = [
# test v2.1 base microversion
('v2_1', {
'api_major_version': 'v2.1'}),
]
def setUp(self):
super(FlavorManageFullstack, self).setUp()
# load any additional fixtures specified by the scenario
for fix in self._additional_fixtures:
self.useFixture(fix())
self.useFixture(policy_fixture.RealPolicyFixture())
api_fixture = self.useFixture(
nova_fixtures.OSAPIFixture(
api_version=self.api_major_version))
# NOTE(sdague): because this test is primarily an admin API
# test default self.api to the admin api.
self.api = api_fixture.admin_api
self.user_api = api_fixture.api
def assertFlavorDbEqual(self, flav, flavdb):
# a mapping of the REST params to the db fields
mapping = {
'name': 'name',
'disk': 'root_gb',
'ram': 'memory_mb',
'vcpus': 'vcpus',
'id': 'flavorid',
'swap': 'swap'
}
for k, v in mapping.items():
if k in flav:
self.assertEqual(flav[k], flavdb[v],
"%s != %s" % (flav, flavdb))
def assertFlavorAPIEqual(self, flav, flavapi):
# for all keys in the flavor, ensure they are correctly set in
# flavapi response.
for k in flav:
if k in flavapi:
self.assertEqual(flav[k], flavapi[k],
"%s != %s" % (flav, flavapi))
else:
self.fail("Missing key: %s in flavor: %s" % (k, flavapi))
def assertFlavorInList(self, flav, flavlist):
for item in flavlist['flavors']:
if flav['id'] == item['id']:
self.assertEqual(flav['name'], item['name'])
return
self.fail("%s not found in %s" % (flav, flavlist))
def assertFlavorNotInList(self, flav, flavlist):
for item in flavlist['flavors']:
if flav['id'] == item['id']:
self.fail("%s found in %s" % (flav, flavlist))
def test_flavor_manage_func_negative(self):
"""Test flavor manage edge conditions.
- Bogus body is a 400
- Unknown flavor is a 404
- Deleting unknown flavor is a 404
"""
# Test for various API failure conditions
# bad body is 400
resp = self.api.api_post('flavors', '', check_response_status=False)
self.assertEqual(400, resp.status)
        # get unknown flavor is 404
        resp = self.api.api_get('flavors/foo', check_response_status=False)
self.assertEqual(404, resp.status)
# delete unknown flavor is 404
resp = self.api.api_delete('flavors/foo', check_response_status=False)
self.assertEqual(404, resp.status)
ctx = context.get_admin_context()
# bounds conditions - invalid vcpus
flav = {'flavor': rand_flavor(vcpus=0)}
resp = self.api.api_post('flavors', flav, check_response_status=False)
self.assertEqual(400, resp.status, resp)
# ... and ensure that we didn't leak it into the db
self.assertRaises(ex.FlavorNotFound,
objects.Flavor.get_by_flavor_id,
ctx, flav['flavor']['id'])
# bounds conditions - invalid ram
flav = {'flavor': rand_flavor(ram=0)}
resp = self.api.api_post('flavors', flav, check_response_status=False)
self.assertEqual(400, resp.status)
# ... and ensure that we didn't leak it into the db
self.assertRaises(ex.FlavorNotFound,
objects.Flavor.get_by_flavor_id,
ctx, flav['flavor']['id'])
# NOTE(sdague): if there are other bounds conditions that
# should be checked, stack them up here.
def test_flavor_manage_deleted(self):
"""Ensure the behavior around a deleted flavor is stable.
- Fetching a deleted flavor works, and returns the flavor info.
- Listings should not contain deleted flavors
"""
# create a deleted flavor
new_flav = {'flavor': rand_flavor()}
self.api.api_post('flavors', new_flav)
self.api.api_delete('flavors/%s' % new_flav['flavor']['id'])
# deleted flavor should not show up in a list
resp = self.api.api_get('flavors')
self.assertFlavorNotInList(new_flav['flavor'], resp.body)
def test_flavor_create_frozen(self):
ctx = context.get_admin_context()
db.flavor_create(ctx, {
'name': 'foo', 'memory_mb': 512, 'vcpus': 1,
'root_gb': 1, 'ephemeral_gb': 0, 'flavorid': 'foo',
'swap': 0, 'rxtx_factor': 1.0, 'vcpu_weight': 1,
'disabled': False, 'is_public': True,
})
new_flav = {'flavor': rand_flavor()}
resp = self.api.api_post('flavors', new_flav,
check_response_status=False)
self.assertEqual(409, resp.status)
def test_flavor_manage_func(self):
"""Basic flavor creation lifecycle testing.
- Creating a flavor
- Ensure it's in the database
- Ensure it's in the listing
- Delete it
- Ensure it's hidden in the database
"""
ctx = context.get_admin_context()
flav1 = {
'flavor': rand_flavor(),
}
# Create flavor and ensure it made it to the database
self.api.api_post('flavors', flav1)
flav1db = objects.Flavor.get_by_flavor_id(ctx, flav1['flavor']['id'])
self.assertFlavorDbEqual(flav1['flavor'], flav1db)
# Ensure new flavor is seen in the listing
resp = self.api.api_get('flavors')
self.assertFlavorInList(flav1['flavor'], resp.body)
# Delete flavor and ensure it was removed from the database
self.api.api_delete('flavors/%s' % flav1['flavor']['id'])
self.assertRaises(ex.FlavorNotFound,
objects.Flavor.get_by_flavor_id,
ctx, flav1['flavor']['id'])
resp = self.api.api_delete('flavors/%s' % flav1['flavor']['id'],
check_response_status=False)
self.assertEqual(404, resp.status)
def test_flavor_manage_permissions(self):
"""Ensure that regular users can't create or delete flavors.
"""
ctx = context.get_admin_context()
flav1 = {'flavor': rand_flavor()}
# Ensure user can't create flavor
resp = self.user_api.api_post('flavors', flav1,
check_response_status=False)
self.assertEqual(403, resp.status)
# ... and that it didn't leak through
self.assertRaises(ex.FlavorNotFound,
objects.Flavor.get_by_flavor_id,
ctx, flav1['flavor']['id'])
# Create the flavor as the admin user
self.api.api_post('flavors', flav1)
# Ensure user can't delete flavors from our cloud
resp = self.user_api.api_delete('flavors/%s' % flav1['flavor']['id'],
check_response_status=False)
self.assertEqual(403, resp.status)
# ... and ensure that we didn't actually delete the flavor,
# this will throw an exception if we did.
objects.Flavor.get_by_flavor_id(ctx, flav1['flavor']['id'])
| apache-2.0 | -2,924,443,214,079,518,700 | -8,753,982,319,551,613,000 | 35.073529 | 78 | 0.586527 | false |
willharris/django | django/core/serializers/__init__.py | 121 | 8167 | """
Interfaces for serializing Django objects.
Usage::
from django.core import serializers
json = serializers.serialize("json", some_queryset)
objects = list(serializers.deserialize("json", json))
To add your own serializers, use the SERIALIZATION_MODULES setting::
SERIALIZATION_MODULES = {
"csv": "path.to.csv.serializer",
"txt": "path.to.txt.serializer",
}
"""
import importlib
from django.apps import apps
from django.conf import settings
from django.utils import six
from django.core.serializers.base import SerializerDoesNotExist
# Built-in serializers
BUILTIN_SERIALIZERS = {
"xml": "django.core.serializers.xml_serializer",
"python": "django.core.serializers.python",
"json": "django.core.serializers.json",
"yaml": "django.core.serializers.pyyaml",
}
_serializers = {}
class BadSerializer(object):
"""
Stub serializer to hold exception raised during registration
    This allows the serializer registration to cache serializers, and if an
    error is raised while creating a serializer, it will be raised and passed
    along to the caller when the serializer is used.
"""
internal_use_only = False
def __init__(self, exception):
self.exception = exception
def __call__(self, *args, **kwargs):
raise self.exception
def register_serializer(format, serializer_module, serializers=None):
"""Register a new serializer.
``serializer_module`` should be the fully qualified module name
for the serializer.
If ``serializers`` is provided, the registration will be added
to the provided dictionary.
If ``serializers`` is not provided, the registration will be made
directly into the global register of serializers. Adding serializers
directly is not a thread-safe operation.
"""
if serializers is None and not _serializers:
_load_serializers()
try:
module = importlib.import_module(serializer_module)
except ImportError as exc:
bad_serializer = BadSerializer(exc)
module = type('BadSerializerModule', (object,), {
'Deserializer': bad_serializer,
'Serializer': bad_serializer,
})
if serializers is None:
_serializers[format] = module
else:
serializers[format] = module
def unregister_serializer(format):
"Unregister a given serializer. This is not a thread-safe operation."
if not _serializers:
_load_serializers()
if format not in _serializers:
raise SerializerDoesNotExist(format)
del _serializers[format]
def get_serializer(format):
if not _serializers:
_load_serializers()
if format not in _serializers:
raise SerializerDoesNotExist(format)
return _serializers[format].Serializer
def get_serializer_formats():
if not _serializers:
_load_serializers()
return list(_serializers)
def get_public_serializer_formats():
if not _serializers:
_load_serializers()
return [k for k, v in six.iteritems(_serializers) if not v.Serializer.internal_use_only]
def get_deserializer(format):
if not _serializers:
_load_serializers()
if format not in _serializers:
raise SerializerDoesNotExist(format)
return _serializers[format].Deserializer
def serialize(format, queryset, **options):
"""
Serialize a queryset (or any iterator that returns database objects) using
a certain serializer.
"""
s = get_serializer(format)()
s.serialize(queryset, **options)
return s.getvalue()
def deserialize(format, stream_or_string, **options):
"""
Deserialize a stream or a string. Returns an iterator that yields ``(obj,
m2m_relation_dict)``, where ``obj`` is an instantiated -- but *unsaved* --
object, and ``m2m_relation_dict`` is a dictionary of ``{m2m_field_name :
list_of_related_objects}``.
"""
d = get_deserializer(format)
return d(stream_or_string, **options)
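# Hedged sketch (assumes a configured Django project and a hypothetical
# ``myapp.Book`` model; defined here only as illustration, never called):
# serialize()/deserialize() round-trip a queryset through the JSON format
# registered above.
def _demo_roundtrip():
    from myapp.models import Book  # hypothetical model
    payload = serialize("json", Book.objects.all())
    for deserialized in deserialize("json", payload):
        deserialized.save()  # persists the unsaved object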
def _load_serializers():
"""
Register built-in and settings-defined serializers. This is done lazily so
that user code has a chance to (e.g.) set up custom settings without
needing to be careful of import order.
"""
global _serializers
serializers = {}
for format in BUILTIN_SERIALIZERS:
register_serializer(format, BUILTIN_SERIALIZERS[format], serializers)
if hasattr(settings, "SERIALIZATION_MODULES"):
for format in settings.SERIALIZATION_MODULES:
register_serializer(format, settings.SERIALIZATION_MODULES[format], serializers)
_serializers = serializers
def sort_dependencies(app_list):
"""Sort a list of (app_config, models) pairs into a single list of models.
The single list of models is sorted so that any model with a natural key
is serialized before a normal model, and any model with a natural key
    dependency has its dependencies serialized first.
"""
# Process the list of models, and get the list of dependencies
model_dependencies = []
models = set()
for app_config, model_list in app_list:
if model_list is None:
model_list = app_config.get_models()
for model in model_list:
models.add(model)
# Add any explicitly defined dependencies
if hasattr(model, 'natural_key'):
deps = getattr(model.natural_key, 'dependencies', [])
if deps:
deps = [apps.get_model(dep) for dep in deps]
else:
deps = []
# Now add a dependency for any FK relation with a model that
# defines a natural key
for field in model._meta.fields:
if hasattr(field.rel, 'to'):
rel_model = field.rel.to
if hasattr(rel_model, 'natural_key') and rel_model != model:
deps.append(rel_model)
# Also add a dependency for any simple M2M relation with a model
# that defines a natural key. M2M relations with explicit through
# models don't count as dependencies.
for field in model._meta.many_to_many:
if field.rel.through._meta.auto_created:
rel_model = field.rel.to
if hasattr(rel_model, 'natural_key') and rel_model != model:
deps.append(rel_model)
model_dependencies.append((model, deps))
model_dependencies.reverse()
# Now sort the models to ensure that dependencies are met. This
# is done by repeatedly iterating over the input list of models.
# If all the dependencies of a given model are in the final list,
# that model is promoted to the end of the final list. This process
# continues until the input list is empty, or we do a full iteration
# over the input models without promoting a model to the final list.
# If we do a full iteration without a promotion, that means there are
# circular dependencies in the list.
model_list = []
while model_dependencies:
skipped = []
changed = False
while model_dependencies:
model, deps = model_dependencies.pop()
# If all of the models in the dependency list are either already
# on the final model list, or not on the original serialization list,
            # then we've found another model with all its dependencies satisfied.
found = True
for candidate in ((d not in models or d in model_list) for d in deps):
if not candidate:
found = False
if found:
model_list.append(model)
changed = True
else:
skipped.append((model, deps))
if not changed:
raise RuntimeError("Can't resolve dependencies for %s in serialized app list." %
', '.join('%s.%s' % (model._meta.app_label, model._meta.object_name)
for model, deps in sorted(skipped, key=lambda obj: obj[0].__name__))
)
model_dependencies = skipped
return model_list
| bsd-3-clause | -3,962,337,581,952,387,600 | -112,765,565,441,209,180 | 33.605932 | 92 | 0.644912 | false |
cruzegoodin/TSC-ShippingDetails | flask/lib/python2.7/site-packages/pbr/git.py | 25 | 9311 | # Copyright 2011 OpenStack LLC.
# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
import distutils.errors
from distutils import log
import io
import os
import re
import subprocess
import pkg_resources
from pbr import options
def _run_shell_command(cmd, throw_on_error=False, buffer=True, env=None):
if buffer:
out_location = subprocess.PIPE
err_location = subprocess.PIPE
else:
out_location = None
err_location = None
newenv = os.environ.copy()
if env:
newenv.update(env)
output = subprocess.Popen(cmd,
stdout=out_location,
stderr=err_location,
env=newenv)
out = output.communicate()
if output.returncode and throw_on_error:
raise distutils.errors.DistutilsError(
"%s returned %d" % (cmd, output.returncode))
if len(out) == 0 or not out[0] or not out[0].strip():
return ''
return out[0].strip().decode('utf-8')
def _run_git_command(cmd, git_dir, **kwargs):
if not isinstance(cmd, (list, tuple)):
cmd = [cmd]
return _run_shell_command(
['git', '--git-dir=%s' % git_dir] + cmd, **kwargs)
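# Hedged sketch (hypothetical helper, unused): _run_git_command prefixes the
# --git-dir flag, so callers pass only the subcommand and its arguments.
def _demo_git_head_sha(git_dir):
    return _run_git_command(['rev-parse', 'HEAD'], git_dir)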
def _get_git_directory():
return _run_shell_command(['git', 'rev-parse', '--git-dir'])
def _git_is_installed():
try:
# We cannot use 'which git' as it may not be available
# in some distributions, So just try 'git --version'
# to see if we run into trouble
_run_shell_command(['git', '--version'])
except OSError:
return False
return True
def _get_highest_tag(tags):
"""Find the highest tag from a list.
Pass in a list of tag strings and this will return the highest
(latest) as sorted by the pkg_resources version parser.
"""
return max(tags, key=pkg_resources.parse_version)
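# Hedged sketch (hypothetical helper, unused): pkg_resources version ordering
# makes _get_highest_tag() pick 1.10.0 over 1.9.0, unlike plain string sorting.
def _demo_highest_tag():
    assert _get_highest_tag(['1.9.0', '1.10.0', '1.2.3']) == '1.10.0'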
def _find_git_files(dirname='', git_dir=None):
"""Behave like a file finder entrypoint plugin.
We don't actually use the entrypoints system for this because it runs
at absurd times. We only want to do this when we are building an sdist.
"""
file_list = []
if git_dir is None:
git_dir = _run_git_functions()
if git_dir:
log.info("[pbr] In git context, generating filelist from git")
file_list = _run_git_command(['ls-files', '-z'], git_dir)
file_list = file_list.split(b'\x00'.decode('utf-8'))
return [f for f in file_list if f]
def _get_raw_tag_info(git_dir):
describe = _run_git_command(['describe', '--always'], git_dir)
if "-" in describe:
return describe.rsplit("-", 2)[-2]
if "." in describe:
return 0
return None
def get_is_release(git_dir):
return _get_raw_tag_info(git_dir) == 0
def _run_git_functions():
git_dir = None
if _git_is_installed():
git_dir = _get_git_directory()
return git_dir or None
def get_git_short_sha(git_dir=None):
"""Return the short sha for this repo, if it exists."""
if not git_dir:
git_dir = _run_git_functions()
if git_dir:
return _run_git_command(
['log', '-n1', '--pretty=format:%h'], git_dir)
return None
def _iter_changelog(changelog):
"""Convert a oneline log iterator to formatted strings.
:param changelog: An iterator of one line log entries like
that given by _iter_log_oneline.
:return: An iterator over (release, formatted changelog) tuples.
"""
first_line = True
current_release = None
yield current_release, "CHANGES\n=======\n\n"
for hash, tags, msg in changelog:
if tags:
current_release = _get_highest_tag(tags)
underline = len(current_release) * '-'
if not first_line:
yield current_release, '\n'
yield current_release, (
"%(tag)s\n%(underline)s\n\n" %
dict(tag=current_release, underline=underline))
if not msg.startswith("Merge "):
if msg.endswith("."):
msg = msg[:-1]
yield current_release, "* %(msg)s\n" % dict(msg=msg)
first_line = False
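# Hedged sketch (hypothetical helper, unused): feeding _iter_changelog() the
# (hash, tags, message) triples produced by _iter_log_oneline yields
# (release, text) chunks that concatenate into a ChangeLog body.
def _demo_iter_changelog():
    entries = [('abc123', set(['1.0.0']), 'Add feature'),
               ('def456', set(), 'Fix bug')]
    text = ''.join(chunk for _release, chunk in _iter_changelog(entries))
    # text == "CHANGES\n=======\n\n1.0.0\n-----\n\n* Add feature\n* Fix bug\n"
    return text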
def _iter_log_oneline(git_dir=None, option_dict=None):
"""Iterate over --oneline log entries if possible.
This parses the output into a structured form but does not apply
presentation logic to the output - making it suitable for different
uses.
:return: An iterator of (hash, tags_set, 1st_line) tuples, or None if
changelog generation is disabled / not available.
"""
if not option_dict:
option_dict = {}
should_skip = options.get_boolean_option(option_dict, 'skip_changelog',
'SKIP_WRITE_GIT_CHANGELOG')
if should_skip:
return
if git_dir is None:
git_dir = _get_git_directory()
if not git_dir:
return
return _iter_log_inner(git_dir)
def _iter_log_inner(git_dir):
"""Iterate over --oneline log entries.
    This parses the output into a structured form but does not apply
presentation logic to the output - making it suitable for different
uses.
:return: An iterator of (hash, tags_set, 1st_line) tuples.
"""
log.info('[pbr] Generating ChangeLog')
log_cmd = ['log', '--oneline', '--decorate']
changelog = _run_git_command(log_cmd, git_dir)
for line in changelog.split('\n'):
line_parts = line.split()
if len(line_parts) < 2:
continue
# Tags are in a list contained in ()'s. If a commit
# subject that is tagged happens to have ()'s in it
# this will fail
if line_parts[1].startswith('(') and ')' in line:
msg = line.split(')')[1].strip()
else:
msg = " ".join(line_parts[1:])
if "tag:" in line:
tags = set([
tag.split(",")[0]
for tag in line.split(")")[0].split("tag: ")[1:]])
else:
tags = set()
yield line_parts[0], tags, msg
def write_git_changelog(git_dir=None, dest_dir=os.path.curdir,
option_dict=dict(), changelog=None):
"""Write a changelog based on the git changelog."""
if not changelog:
changelog = _iter_log_oneline(git_dir=git_dir, option_dict=option_dict)
if changelog:
changelog = _iter_changelog(changelog)
if not changelog:
return
log.info('[pbr] Writing ChangeLog')
new_changelog = os.path.join(dest_dir, 'ChangeLog')
# If there's already a ChangeLog and it's not writable, just use it
if (os.path.exists(new_changelog)
and not os.access(new_changelog, os.W_OK)):
return
with io.open(new_changelog, "w", encoding="utf-8") as changelog_file:
for release, content in changelog:
changelog_file.write(content)
def generate_authors(git_dir=None, dest_dir='.', option_dict=dict()):
"""Create AUTHORS file using git commits."""
should_skip = options.get_boolean_option(option_dict, 'skip_authors',
'SKIP_GENERATE_AUTHORS')
if should_skip:
return
old_authors = os.path.join(dest_dir, 'AUTHORS.in')
new_authors = os.path.join(dest_dir, 'AUTHORS')
# If there's already an AUTHORS file and it's not writable, just use it
if (os.path.exists(new_authors)
and not os.access(new_authors, os.W_OK)):
return
log.info('[pbr] Generating AUTHORS')
ignore_emails = '(jenkins@review|infra@lists|jenkins@openstack)'
if git_dir is None:
git_dir = _get_git_directory()
if git_dir:
authors = []
# don't include jenkins email address in AUTHORS file
git_log_cmd = ['log', '--format=%aN <%aE>']
authors += _run_git_command(git_log_cmd, git_dir).split('\n')
authors = [a for a in authors if not re.search(ignore_emails, a)]
# get all co-authors from commit messages
co_authors_out = _run_git_command('log', git_dir)
co_authors = re.findall('Co-authored-by:.+', co_authors_out,
re.MULTILINE)
co_authors = [signed.split(":", 1)[1].strip()
for signed in co_authors if signed]
authors += co_authors
authors = sorted(set(authors))
with open(new_authors, 'wb') as new_authors_fh:
if os.path.exists(old_authors):
with open(old_authors, "rb") as old_authors_fh:
new_authors_fh.write(old_authors_fh.read())
new_authors_fh.write(('\n'.join(authors) + '\n')
.encode('utf-8'))
| bsd-3-clause | 8,760,141,018,430,864,000 | 7,343,012,298,034,504,000 | 32.735507 | 79 | 0.597465 | false |
NexusIS/libcloud | libcloud/test/storage/test_atmos.py | 46 | 32319 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import os.path
import sys
import unittest
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import urlparse
from libcloud.utils.py3 import b
import libcloud.utils.files
from libcloud.common.types import LibcloudError
from libcloud.storage.base import Container, Object
from libcloud.storage.types import ContainerAlreadyExistsError, \
ContainerDoesNotExistError, \
ContainerIsNotEmptyError, \
ObjectDoesNotExistError
from libcloud.storage.drivers.atmos import AtmosConnection, AtmosDriver
from libcloud.storage.drivers.dummy import DummyIterator
from libcloud.test import StorageMockHttp, MockRawResponse
from libcloud.test.file_fixtures import StorageFileFixtures
class AtmosTests(unittest.TestCase):
def setUp(self):
AtmosDriver.connectionCls.conn_classes = (None, AtmosMockHttp)
AtmosDriver.connectionCls.rawResponseCls = AtmosMockRawResponse
AtmosDriver.path = ''
AtmosMockHttp.type = None
AtmosMockHttp.upload_created = False
AtmosMockRawResponse.type = None
self.driver = AtmosDriver('dummy', base64.b64encode(b('dummy')))
self._remove_test_file()
def tearDown(self):
self._remove_test_file()
def _remove_test_file(self):
file_path = os.path.abspath(__file__) + '.temp'
try:
os.unlink(file_path)
except OSError:
pass
def test_list_containers(self):
AtmosMockHttp.type = 'EMPTY'
containers = self.driver.list_containers()
self.assertEqual(len(containers), 0)
AtmosMockHttp.type = None
containers = self.driver.list_containers()
self.assertEqual(len(containers), 6)
def test_list_container_objects(self):
container = Container(name='test_container', extra={},
driver=self.driver)
AtmosMockHttp.type = 'EMPTY'
objects = self.driver.list_container_objects(container=container)
self.assertEqual(len(objects), 0)
AtmosMockHttp.type = None
objects = self.driver.list_container_objects(container=container)
self.assertEqual(len(objects), 2)
obj = [o for o in objects if o.name == 'not-a-container1'][0]
self.assertEqual(obj.meta_data['object_id'],
'651eae32634bf84529c74eabd555fda48c7cead6')
self.assertEqual(obj.container.name, 'test_container')
def test_get_container(self):
container = self.driver.get_container(container_name='test_container')
self.assertEqual(container.name, 'test_container')
self.assertEqual(container.extra['object_id'],
'b21cb59a2ba339d1afdd4810010b0a5aba2ab6b9')
def test_get_container_escaped(self):
container = self.driver.get_container(
container_name='test & container')
self.assertEqual(container.name, 'test & container')
self.assertEqual(container.extra['object_id'],
'b21cb59a2ba339d1afdd4810010b0a5aba2ab6b9')
def test_get_container_not_found(self):
try:
self.driver.get_container(container_name='not_found')
except ContainerDoesNotExistError:
pass
else:
self.fail('Exception was not thrown')
def test_create_container_success(self):
container = self.driver.create_container(
container_name='test_create_container')
self.assertTrue(isinstance(container, Container))
self.assertEqual(container.name, 'test_create_container')
self.assertEqual(container.extra['object_id'],
'31a27b593629a3fe59f887fd973fd953e80062ce')
def test_create_container_already_exists(self):
AtmosMockHttp.type = 'ALREADY_EXISTS'
try:
self.driver.create_container(
container_name='test_create_container')
except ContainerAlreadyExistsError:
pass
else:
self.fail(
'Container already exists but an exception was not thrown')
def test_delete_container_success(self):
container = Container(name='foo_bar_container', extra={}, driver=self)
result = self.driver.delete_container(container=container)
self.assertTrue(result)
def test_delete_container_not_found(self):
AtmosMockHttp.type = 'NOT_FOUND'
container = Container(name='foo_bar_container', extra={}, driver=self)
try:
self.driver.delete_container(container=container)
except ContainerDoesNotExistError:
pass
else:
self.fail(
'Container does not exist but an exception was not thrown')
def test_delete_container_not_empty(self):
AtmosMockHttp.type = 'NOT_EMPTY'
container = Container(name='foo_bar_container', extra={}, driver=self)
try:
self.driver.delete_container(container=container)
except ContainerIsNotEmptyError:
pass
else:
self.fail('Container is not empty but an exception was not thrown')
def test_get_object_success(self):
obj = self.driver.get_object(container_name='test_container',
object_name='test_object')
self.assertEqual(obj.container.name, 'test_container')
self.assertEqual(obj.size, 555)
self.assertEqual(obj.hash, '6b21c4a111ac178feacf9ec9d0c71f17')
self.assertEqual(obj.extra['object_id'],
'322dce3763aadc41acc55ef47867b8d74e45c31d6643')
self.assertEqual(
obj.extra['last_modified'], 'Tue, 25 Jan 2011 22:01:49 GMT')
self.assertEqual(obj.meta_data['foo-bar'], 'test 1')
self.assertEqual(obj.meta_data['bar-foo'], 'test 2')
def test_get_object_escaped(self):
obj = self.driver.get_object(container_name='test & container',
object_name='test & object')
self.assertEqual(obj.container.name, 'test & container')
self.assertEqual(obj.size, 555)
self.assertEqual(obj.hash, '6b21c4a111ac178feacf9ec9d0c71f17')
self.assertEqual(obj.extra['object_id'],
'322dce3763aadc41acc55ef47867b8d74e45c31d6643')
self.assertEqual(
obj.extra['last_modified'], 'Tue, 25 Jan 2011 22:01:49 GMT')
self.assertEqual(obj.meta_data['foo-bar'], 'test 1')
self.assertEqual(obj.meta_data['bar-foo'], 'test 2')
def test_get_object_not_found(self):
try:
self.driver.get_object(container_name='test_container',
object_name='not_found')
except ObjectDoesNotExistError:
pass
else:
self.fail('Exception was not thrown')
def test_delete_object_success(self):
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
container=container, meta_data=None,
driver=self.driver)
status = self.driver.delete_object(obj=obj)
self.assertTrue(status)
def test_delete_object_escaped_success(self):
container = Container(name='foo & bar_container', extra={},
driver=self.driver)
obj = Object(name='foo & bar_object', size=1000, hash=None, extra={},
container=container, meta_data=None,
driver=self.driver)
status = self.driver.delete_object(obj=obj)
self.assertTrue(status)
def test_delete_object_not_found(self):
AtmosMockHttp.type = 'NOT_FOUND'
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
container=container, meta_data=None,
driver=self.driver)
try:
self.driver.delete_object(obj=obj)
except ObjectDoesNotExistError:
pass
else:
self.fail('Object does not exist but an exception was not thrown')
def test_download_object_success(self):
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
container=container, meta_data=None,
driver=self.driver)
destination_path = os.path.abspath(__file__) + '.temp'
result = self.driver.download_object(obj=obj,
destination_path=destination_path,
overwrite_existing=False,
delete_on_failure=True)
self.assertTrue(result)
def test_download_object_escaped_success(self):
container = Container(name='foo & bar_container', extra={},
driver=self.driver)
obj = Object(name='foo & bar_object', size=1000, hash=None, extra={},
container=container, meta_data=None,
driver=self.driver)
destination_path = os.path.abspath(__file__) + '.temp'
result = self.driver.download_object(obj=obj,
destination_path=destination_path,
overwrite_existing=False,
delete_on_failure=True)
self.assertTrue(result)
def test_download_object_success_not_found(self):
AtmosMockRawResponse.type = 'NOT_FOUND'
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
container=container,
meta_data=None,
driver=self.driver)
destination_path = os.path.abspath(__file__) + '.temp'
try:
self.driver.download_object(
obj=obj,
destination_path=destination_path,
overwrite_existing=False,
delete_on_failure=True)
except ObjectDoesNotExistError:
pass
else:
self.fail('Object does not exist but an exception was not thrown')
def test_download_object_as_stream(self):
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
container=container, meta_data=None,
driver=self.driver)
stream = self.driver.download_object_as_stream(
obj=obj, chunk_size=None)
self.assertTrue(hasattr(stream, '__iter__'))
def test_download_object_as_stream_escaped(self):
container = Container(name='foo & bar_container', extra={},
driver=self.driver)
obj = Object(name='foo & bar_object', size=1000, hash=None, extra={},
container=container, meta_data=None,
driver=self.driver)
stream = self.driver.download_object_as_stream(
obj=obj, chunk_size=None)
self.assertTrue(hasattr(stream, '__iter__'))
def test_upload_object_success(self):
def upload_file(self, response, file_path, chunked=False,
calculate_hash=True):
return True, 'hash343hhash89h932439jsaa89', 1000
old_func = AtmosDriver._upload_file
AtmosDriver._upload_file = upload_file
path = os.path.abspath(__file__)
container = Container(name='fbc', extra={}, driver=self)
object_name = 'ftu'
extra = {'meta_data': {'some-value': 'foobar'}}
obj = self.driver.upload_object(file_path=path, container=container,
extra=extra, object_name=object_name)
self.assertEqual(obj.name, 'ftu')
self.assertEqual(obj.size, 1000)
self.assertTrue('some-value' in obj.meta_data)
AtmosDriver._upload_file = old_func
def test_upload_object_no_content_type(self):
def no_content_type(name):
return None, None
old_func = libcloud.utils.files.guess_file_mime_type
libcloud.utils.files.guess_file_mime_type = no_content_type
file_path = os.path.abspath(__file__)
container = Container(name='fbc', extra={}, driver=self)
object_name = 'ftu'
obj = self.driver.upload_object(file_path=file_path,
container=container,
object_name=object_name)
# Just check that the file was uploaded OK, as the fallback
# Content-Type header should be set (application/octet-stream).
self.assertEqual(obj.name, object_name)
libcloud.utils.files.guess_file_mime_type = old_func
def test_upload_object_error(self):
def dummy_content_type(name):
return 'application/zip', None
def send(instance):
raise Exception('')
old_func1 = libcloud.utils.files.guess_file_mime_type
libcloud.utils.files.guess_file_mime_type = dummy_content_type
old_func2 = AtmosMockHttp.send
AtmosMockHttp.send = send
file_path = os.path.abspath(__file__)
container = Container(name='fbc', extra={}, driver=self)
object_name = 'ftu'
try:
self.driver.upload_object(
file_path=file_path,
container=container,
object_name=object_name)
except LibcloudError:
pass
else:
self.fail(
'Timeout while uploading but an exception was not thrown')
finally:
libcloud.utils.files.guess_file_mime_type = old_func1
AtmosMockHttp.send = old_func2
def test_upload_object_nonexistent_file(self):
def dummy_content_type(name):
return 'application/zip', None
old_func = libcloud.utils.files.guess_file_mime_type
libcloud.utils.files.guess_file_mime_type = dummy_content_type
file_path = os.path.abspath(__file__ + '.inexistent')
container = Container(name='fbc', extra={}, driver=self)
object_name = 'ftu'
try:
self.driver.upload_object(
file_path=file_path,
container=container,
object_name=object_name)
except OSError:
pass
else:
            self.fail('Nonexistent file but an exception was not thrown')
finally:
libcloud.utils.files.guess_file_mime_type = old_func
def test_upload_object_via_stream_new_object(self):
def dummy_content_type(name):
return 'application/zip', None
old_func = libcloud.storage.drivers.atmos.guess_file_mime_type
libcloud.storage.drivers.atmos.guess_file_mime_type = dummy_content_type
container = Container(name='fbc', extra={}, driver=self)
object_name = 'ftsdn'
iterator = DummyIterator(data=['2', '3', '5'])
try:
self.driver.upload_object_via_stream(container=container,
object_name=object_name,
iterator=iterator)
finally:
libcloud.storage.drivers.atmos.guess_file_mime_type = old_func
def test_upload_object_via_stream_existing_object(self):
def dummy_content_type(name):
return 'application/zip', None
old_func = libcloud.storage.drivers.atmos.guess_file_mime_type
libcloud.storage.drivers.atmos.guess_file_mime_type = dummy_content_type
container = Container(name='fbc', extra={}, driver=self)
object_name = 'ftsde'
iterator = DummyIterator(data=['2', '3', '5'])
try:
self.driver.upload_object_via_stream(container=container,
object_name=object_name,
iterator=iterator)
finally:
libcloud.storage.drivers.atmos.guess_file_mime_type = old_func
def test_upload_object_via_stream_no_content_type(self):
def no_content_type(name):
return None, None
old_func = libcloud.storage.drivers.atmos.guess_file_mime_type
libcloud.storage.drivers.atmos.guess_file_mime_type = no_content_type
container = Container(name='fbc', extra={}, driver=self)
object_name = 'ftsdct'
iterator = DummyIterator(data=['2', '3', '5'])
try:
self.driver.upload_object_via_stream(container=container,
object_name=object_name,
iterator=iterator)
except AttributeError:
pass
else:
self.fail(
'File content type not provided'
' but an exception was not thrown')
finally:
libcloud.storage.drivers.atmos.guess_file_mime_type = old_func
def test_signature_algorithm(self):
test_uid = 'fredsmagicuid'
test_key = base64.b64encode(b('ssssshhhhhmysecretkey'))
test_date = 'Mon, 04 Jul 2011 07:39:19 GMT'
test_values = [
('GET', '/rest/namespace/foo', '', {},
'WfSASIA25TuqO2n0aO9k/dtg6S0='),
('GET', '/rest/namespace/foo%20%26%20bar', '', {},
'vmlqXqcInxxoP4YX5mR09BonjX4='),
('POST', '/rest/namespace/foo', '', {},
'oYKdsF+1DOuUT7iX5CJCDym2EQk='),
('PUT', '/rest/namespace/foo', '', {},
'JleF9dpSWhaT3B2swZI3s41qqs4='),
('DELETE', '/rest/namespace/foo', '', {},
'2IX+Bd5XZF5YY+g4P59qXV1uLpo='),
('GET', '/rest/namespace/foo?metata/system', '', {},
'zuHDEAgKM1winGnWn3WBsqnz4ks='),
('POST', '/rest/namespace/foo?metadata/user', '', {
'x-emc-meta': 'fakemeta=fake, othermeta=faketoo'
}, '7sLx1nxPIRAtocfv02jz9h1BjbU='),
]
class FakeDriver(object):
path = ''
for method, action, api_path, headers, expected in test_values:
c = AtmosConnection(test_uid, test_key)
c.method = method
c.action = action
d = FakeDriver()
d.path = api_path
c.driver = d
headers = c.add_default_headers(headers)
headers['Date'] = headers['x-emc-date'] = test_date
self.assertEqual(c._calculate_signature({}, headers),
b(expected).decode('utf-8'))
class AtmosMockHttp(StorageMockHttp, unittest.TestCase):
fixtures = StorageFileFixtures('atmos')
upload_created = False
upload_stream_created = False
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self)
if kwargs.get('host', None) and kwargs.get('port', None):
StorageMockHttp.__init__(self, *args, **kwargs)
self._upload_object_via_stream_first_request = True
def runTest(self):
pass
def request(self, method, url, body=None, headers=None, raw=False):
headers = headers or {}
parsed = urlparse.urlparse(url)
if parsed.query.startswith('metadata/'):
parsed = list(parsed)
parsed[2] = parsed[2] + '/' + parsed[4]
parsed[4] = ''
url = urlparse.urlunparse(parsed)
return super(AtmosMockHttp, self).request(method, url, body, headers,
raw)
def _rest_namespace_EMPTY(self, method, url, body, headers):
body = self.fixtures.load('empty_directory_listing.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _rest_namespace(self, method, url, body, headers):
body = self.fixtures.load('list_containers.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _rest_namespace_test_container_EMPTY(self, method, url, body, headers):
body = self.fixtures.load('empty_directory_listing.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _rest_namespace_test_container(self, method, url, body, headers):
body = self.fixtures.load('list_containers.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _rest_namespace_test_container__metadata_system(
self, method, url, body,
headers):
headers = {
'x-emc-meta': 'objectid=b21cb59a2ba339d1afdd4810010b0a5aba2ab6b9'
}
return (httplib.OK, '', headers, httplib.responses[httplib.OK])
def _rest_namespace_test_20_26_20container__metadata_system(
self, method, url, body,
headers):
headers = {
'x-emc-meta': 'objectid=b21cb59a2ba339d1afdd4810010b0a5aba2ab6b9'
}
return (httplib.OK, '', headers, httplib.responses[httplib.OK])
def _rest_namespace_not_found__metadata_system(self, method, url, body,
headers):
body = self.fixtures.load('not_found.xml')
return (httplib.NOT_FOUND, body, {},
httplib.responses[httplib.NOT_FOUND])
def _rest_namespace_test_create_container(self, method, url, body, headers):
return (httplib.OK, '', {}, httplib.responses[httplib.OK])
def _rest_namespace_test_create_container__metadata_system(self, method,
url, body,
headers):
headers = {
'x-emc-meta': 'objectid=31a27b593629a3fe59f887fd973fd953e80062ce'
}
return (httplib.OK, '', headers, httplib.responses[httplib.OK])
def _rest_namespace_test_create_container_ALREADY_EXISTS(self, method, url,
body, headers):
body = self.fixtures.load('already_exists.xml')
return (httplib.BAD_REQUEST, body, {},
httplib.responses[httplib.BAD_REQUEST])
def _rest_namespace_foo_bar_container(self, method, url, body, headers):
return (httplib.OK, '', {}, httplib.responses[httplib.OK])
def _rest_namespace_foo_bar_container_NOT_FOUND(self, method, url, body,
headers):
body = self.fixtures.load('not_found.xml')
return (httplib.NOT_FOUND, body, {},
httplib.responses[httplib.NOT_FOUND])
def _rest_namespace_foo_bar_container_NOT_EMPTY(self, method, url, body,
headers):
body = self.fixtures.load('not_empty.xml')
return (httplib.BAD_REQUEST, body, {},
httplib.responses[httplib.BAD_REQUEST])
def _rest_namespace_test_container_test_object_metadata_system(
self, method,
url, body,
headers):
meta = {
'objectid': '322dce3763aadc41acc55ef47867b8d74e45c31d6643',
'size': '555',
'mtime': '2011-01-25T22:01:49Z'
}
headers = {
'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
}
return (httplib.OK, '', headers, httplib.responses[httplib.OK])
def _rest_namespace_test_20_26_20container_test_20_26_20object_metadata_system(
self, method,
url, body,
headers):
meta = {
'objectid': '322dce3763aadc41acc55ef47867b8d74e45c31d6643',
'size': '555',
'mtime': '2011-01-25T22:01:49Z'
}
headers = {
'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
}
return (httplib.OK, '', headers, httplib.responses[httplib.OK])
def _rest_namespace_test_container_test_object_metadata_user(self, method,
url, body,
headers):
meta = {
'md5': '6b21c4a111ac178feacf9ec9d0c71f17',
'foo-bar': 'test 1',
'bar-foo': 'test 2',
}
headers = {
'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
}
return (httplib.OK, '', headers, httplib.responses[httplib.OK])
def _rest_namespace_test_20_26_20container_test_20_26_20object_metadata_user(
self, method,
url, body,
headers):
meta = {
'md5': '6b21c4a111ac178feacf9ec9d0c71f17',
'foo-bar': 'test 1',
'bar-foo': 'test 2',
}
headers = {
'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
}
return (httplib.OK, '', headers, httplib.responses[httplib.OK])
def _rest_namespace_test_container_not_found_metadata_system(self, method,
url, body,
headers):
body = self.fixtures.load('not_found.xml')
return (httplib.NOT_FOUND, body, {},
httplib.responses[httplib.NOT_FOUND])
def _rest_namespace_foo_bar_container_foo_bar_object(self, method, url,
body, headers):
return (httplib.OK, '', {}, httplib.responses[httplib.OK])
def _rest_namespace_foo_20_26_20bar_container_foo_20_26_20bar_object(
self, method, url,
body, headers):
return (httplib.OK, '', {}, httplib.responses[httplib.OK])
def _rest_namespace_foo_bar_container_foo_bar_object_NOT_FOUND(
self, method,
url, body,
headers):
body = self.fixtures.load('not_found.xml')
return (httplib.NOT_FOUND, body, {},
httplib.responses[httplib.NOT_FOUND])
def _rest_namespace_fbc_ftu_metadata_system(self, method, url, body,
headers):
if not self.upload_created:
self.__class__.upload_created = True
body = self.fixtures.load('not_found.xml')
return (httplib.NOT_FOUND, body, {},
httplib.responses[httplib.NOT_FOUND])
self.__class__.upload_created = False
meta = {
'objectid': '322dce3763aadc41acc55ef47867b8d74e45c31d6643',
'size': '555',
'mtime': '2011-01-25T22:01:49Z'
}
headers = {
'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
}
return (httplib.OK, '', headers, httplib.responses[httplib.OK])
def _rest_namespace_fbc_ftu_metadata_user(self, method, url, body, headers):
self.assertTrue('x-emc-meta' in headers)
return (httplib.OK, '', {}, httplib.responses[httplib.OK])
def _rest_namespace_fbc_ftsdn_metadata_system(self, method, url, body,
headers):
if not self.upload_stream_created:
self.__class__.upload_stream_created = True
body = self.fixtures.load('not_found.xml')
return (httplib.NOT_FOUND, body, {},
httplib.responses[httplib.NOT_FOUND])
self.__class__.upload_stream_created = False
meta = {
'objectid': '322dce3763aadc41acc55ef47867b8d74e45c31d6643',
'size': '555',
'mtime': '2011-01-25T22:01:49Z'
}
headers = {
'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
}
return (httplib.OK, '', headers, httplib.responses[httplib.OK])
def _rest_namespace_fbc_ftsdn(self, method, url, body, headers):
if self._upload_object_via_stream_first_request:
self.assertTrue('Range' not in headers)
self.assertEqual(method, 'POST')
self._upload_object_via_stream_first_request = False
else:
self.assertTrue('Range' in headers)
self.assertEqual(method, 'PUT')
return (httplib.OK, '', {}, httplib.responses[httplib.OK])
def _rest_namespace_fbc_ftsdn_metadata_user(self, method, url, body,
headers):
self.assertTrue('x-emc-meta' in headers)
return (httplib.OK, '', {}, httplib.responses[httplib.OK])
def _rest_namespace_fbc_ftsde_metadata_system(self, method, url, body,
headers):
meta = {
'objectid': '322dce3763aadc41acc55ef47867b8d74e45c31d6643',
'size': '555',
'mtime': '2011-01-25T22:01:49Z'
}
headers = {
'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
}
return (httplib.OK, '', headers, httplib.responses[httplib.OK])
def _rest_namespace_fbc_ftsde(self, method, url, body, headers):
if self._upload_object_via_stream_first_request:
self.assertTrue('Range' not in headers)
self._upload_object_via_stream_first_request = False
else:
self.assertTrue('Range' in headers)
self.assertEqual(method, 'PUT')
return (httplib.OK, '', {}, httplib.responses[httplib.OK])
def _rest_namespace_fbc_ftsde_metadata_user(self, method, url, body,
headers):
self.assertTrue('x-emc-meta' in headers)
return (httplib.OK, '', {}, httplib.responses[httplib.OK])
def _rest_namespace_fbc_ftsd_metadata_system(self, method, url, body,
headers):
meta = {
'objectid': '322dce3763aadc41acc55ef47867b8d74e45c31d6643',
'size': '555',
'mtime': '2011-01-25T22:01:49Z'
}
headers = {
'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
}
return (httplib.OK, '', headers, httplib.responses[httplib.OK])
class AtmosMockRawResponse(MockRawResponse):
fixtures = StorageFileFixtures('atmos')
def _rest_namespace_foo_bar_container_foo_bar_object(self, method, url,
body, headers):
body = self._generate_random_data(1000)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _rest_namespace_foo_20_26_20bar_container_foo_20_26_20bar_object(
self, method, url,
body, headers):
body = self._generate_random_data(1000)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _rest_namespace_foo_bar_container_foo_bar_object_NOT_FOUND(
self, method,
url, body,
headers):
body = self.fixtures.load('not_found.xml')
return (httplib.NOT_FOUND, body, {},
httplib.responses[httplib.NOT_FOUND])
def _rest_namespace_fbc_ftu(self, method, url, body, headers):
return (httplib.CREATED, '', {}, httplib.responses[httplib.CREATED])
if __name__ == '__main__':
sys.exit(unittest.main())
| apache-2.0 | -8,578,702,137,064,874,000 | -1,145,857,039,965,914,400 | 40.86399 | 83 | 0.571521 | false |
BogusCurry/BlocksCAD | closure-library/closure/bin/calcdeps.py | 223 | 18544 | #!/usr/bin/env python
#
# Copyright 2006 The Closure Library Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Calculates JavaScript dependencies without requiring Google's build system.
This tool is deprecated and is provided for legacy users.
See build/closurebuilder.py and build/depswriter.py for the current tools.
It iterates over a number of search paths and builds a dependency tree. With
the inputs provided, it walks the dependency tree and outputs all the files
required for compilation.
"""
try:
import distutils.version
except ImportError:
# distutils is not available in all environments
distutils = None
import logging
import optparse
import os
import re
import subprocess
import sys
_BASE_REGEX_STRING = '^\s*goog\.%s\(\s*[\'"](.+)[\'"]\s*\)'
req_regex = re.compile(_BASE_REGEX_STRING % 'require')
prov_regex = re.compile(_BASE_REGEX_STRING % 'provide')
ns_regex = re.compile('^ns:((\w+\.)*(\w+))$')
version_regex = re.compile('[\.0-9]+')
def IsValidFile(ref):
"""Returns true if the provided reference is a file and exists."""
return os.path.isfile(ref)
def IsJsFile(ref):
"""Returns true if the provided reference is a Javascript file."""
return ref.endswith('.js')
def IsNamespace(ref):
"""Returns true if the provided reference is a namespace."""
return re.match(ns_regex, ref) is not None
def IsDirectory(ref):
"""Returns true if the provided reference is a directory."""
return os.path.isdir(ref)
def ExpandDirectories(refs):
"""Expands any directory references into inputs.
Description:
Looks for any directories in the provided references. Found directories
are recursively searched for .js files, which are then added to the result
list.
Args:
refs: a list of references such as files, directories, and namespaces
Returns:
A list of references with directories removed and replaced by any
.js files that are found in them. Also, the paths will be normalized.
"""
result = []
for ref in refs:
if IsDirectory(ref):
# Disable 'Unused variable' for subdirs
# pylint: disable=unused-variable
for (directory, subdirs, filenames) in os.walk(ref):
for filename in filenames:
if IsJsFile(filename):
result.append(os.path.join(directory, filename))
else:
result.append(ref)
return map(os.path.normpath, result)
class DependencyInfo(object):
"""Represents a dependency that is used to build and walk a tree."""
def __init__(self, filename):
self.filename = filename
self.provides = []
self.requires = []
def __str__(self):
return '%s Provides: %s Requires: %s' % (self.filename,
repr(self.provides),
repr(self.requires))
def BuildDependenciesFromFiles(files):
"""Build a list of dependencies from a list of files.
Description:
Takes a list of files, extracts their provides and requires, and builds
out a list of dependency objects.
Args:
files: a list of files to be parsed for goog.provides and goog.requires.
Returns:
A list of dependency objects, one for each file in the files argument.
"""
result = []
filenames = set()
for filename in files:
if filename in filenames:
continue
# Python 3 requires the file encoding to be specified
if (sys.version_info[0] < 3):
file_handle = open(filename, 'r')
else:
file_handle = open(filename, 'r', encoding='utf8')
try:
dep = CreateDependencyInfo(filename, file_handle)
result.append(dep)
finally:
file_handle.close()
filenames.add(filename)
return result
def CreateDependencyInfo(filename, source):
"""Create dependency info.
Args:
filename: Filename for source.
source: File-like object containing source.
Returns:
A DependencyInfo object with provides and requires filled.
"""
dep = DependencyInfo(filename)
for line in source:
if re.match(req_regex, line):
dep.requires.append(re.search(req_regex, line).group(1))
if re.match(prov_regex, line):
dep.provides.append(re.search(prov_regex, line).group(1))
return dep
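# Hedged sketch (hypothetical helper, unused by this tool): the source
# argument only needs to be iterable line by line, so a plain list of
# strings works for quick experiments.
def _DemoCreateDependencyInfo():
  dep = CreateDependencyInfo('foo.js', ["goog.provide('myapp.foo');\n",
                                        "goog.require('goog.array');\n"])
  assert dep.provides == ['myapp.foo']
  assert dep.requires == ['goog.array']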
def BuildDependencyHashFromDependencies(deps):
"""Builds a hash for searching dependencies by the namespaces they provide.
Description:
Dependency objects can provide multiple namespaces. This method enumerates
the provides of each dependency and adds them to a hash that can be used
to easily resolve a given dependency by a namespace it provides.
Args:
deps: a list of dependency objects used to build the hash.
Raises:
    Exception: If multiple files try to provide the same namespace.
Returns:
A hash table { namespace: dependency } that can be used to resolve a
dependency by a namespace it provides.
"""
dep_hash = {}
for dep in deps:
for provide in dep.provides:
if provide in dep_hash:
raise Exception('Duplicate provide (%s) in (%s, %s)' % (
provide,
dep_hash[provide].filename,
dep.filename))
dep_hash[provide] = dep
return dep_hash
def CalculateDependencies(paths, inputs):
"""Calculates the dependencies for given inputs.
Description:
This method takes a list of paths (files, directories) and builds a
searchable data structure based on the namespaces that each .js file
provides. It then parses through each input, resolving dependencies
against this data structure. The final output is a list of files,
including the inputs, that represent all of the code that is needed to
compile the given inputs.
Args:
paths: the references (files, directories) that are used to build the
dependency hash.
inputs: the inputs (files, directories, namespaces) that have dependencies
that need to be calculated.
Raises:
Exception: if a provided input is invalid.
Returns:
A list of all files, including inputs, that are needed to compile the given
inputs.
"""
deps = BuildDependenciesFromFiles(paths + inputs)
search_hash = BuildDependencyHashFromDependencies(deps)
result_list = []
seen_list = []
for input_file in inputs:
if IsNamespace(input_file):
namespace = re.search(ns_regex, input_file).group(1)
if namespace not in search_hash:
raise Exception('Invalid namespace (%s)' % namespace)
input_file = search_hash[namespace].filename
if not IsValidFile(input_file) or not IsJsFile(input_file):
raise Exception('Invalid file (%s)' % input_file)
seen_list.append(input_file)
file_handle = open(input_file, 'r')
try:
for line in file_handle:
if re.match(req_regex, line):
require = re.search(req_regex, line).group(1)
ResolveDependencies(require, search_hash, result_list, seen_list)
finally:
file_handle.close()
result_list.append(input_file)
# All files depend on base.js, so put it first.
base_js_path = FindClosureBasePath(paths)
if base_js_path:
result_list.insert(0, base_js_path)
else:
logging.warning('Closure Library base.js not found.')
return result_list
def FindClosureBasePath(paths):
"""Given a list of file paths, return Closure base.js path, if any.
Args:
paths: A list of paths.
Returns:
The path to Closure's base.js file including filename, if found.
"""
for path in paths:
pathname, filename = os.path.split(path)
if filename == 'base.js':
f = open(path)
is_base = False
# Sanity check that this is the Closure base file. Check that this
# is where goog is defined. This is determined by the @provideGoog
# flag.
for line in f:
if '@provideGoog' in line:
is_base = True
break
f.close()
if is_base:
return path
def ResolveDependencies(require, search_hash, result_list, seen_list):
"""Takes a given requirement and resolves all of the dependencies for it.
Description:
A given requirement may require other dependencies. This method
recursively resolves all dependencies for the given requirement.
Raises:
Exception: when require does not exist in the search_hash.
Args:
require: the namespace to resolve dependencies for.
search_hash: the data structure used for resolving dependencies.
result_list: a list of filenames that have been calculated as dependencies.
This variable is the output for this function.
seen_list: a list of filenames that have been 'seen'. This is required
      for the dependency->dependent ordering.
"""
if require not in search_hash:
raise Exception('Missing provider for (%s)' % require)
dep = search_hash[require]
if not dep.filename in seen_list:
seen_list.append(dep.filename)
for sub_require in dep.requires:
ResolveDependencies(sub_require, search_hash, result_list, seen_list)
result_list.append(dep.filename)
def GetDepsLine(dep, base_path):
"""Returns a JS string for a dependency statement in the deps.js file.
Args:
dep: The dependency that we're printing.
base_path: The path to Closure's base.js including filename.
"""
return 'goog.addDependency("%s", %s, %s);' % (
GetRelpath(dep.filename, base_path), dep.provides, dep.requires)
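# Example output (hypothetical dependency one directory above base.js that
# provides goog.events and requires goog.array):
#   goog.addDependency("../events.js", ['goog.events'], ['goog.array']);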
def GetRelpath(path, start):
"""Return a relative path to |path| from |start|."""
# NOTE: Python 2.6 provides os.path.relpath, which has almost the same
# functionality as this function. Since we want to support 2.4, we have
# to implement it manually. :(
path_list = os.path.abspath(os.path.normpath(path)).split(os.sep)
start_list = os.path.abspath(
os.path.normpath(os.path.dirname(start))).split(os.sep)
common_prefix_count = 0
for i in range(0, min(len(path_list), len(start_list))):
if path_list[i] != start_list[i]:
break
common_prefix_count += 1
# Always use forward slashes, because this will get expanded to a url,
# not a file path.
return '/'.join(['..'] * (len(start_list) - common_prefix_count) +
path_list[common_prefix_count:])
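# Illustrative example with hypothetical paths: for
#   path  = '/closure/goog/events/events.js'
#   start = '/closure/goog/base.js'
# the common prefix is /closure/goog, so the function returns
# 'events/events.js'.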
def PrintLine(msg, out):
out.write(msg)
out.write('\n')
def PrintDeps(source_paths, deps, out):
"""Print out a deps.js file from a list of source paths.
Args:
source_paths: Paths that we should generate dependency info for.
deps: Paths that provide dependency info. Their dependency info should
not appear in the deps file.
out: The output file.
Returns:
True on success, false if it was unable to find the base path
to generate deps relative to.
"""
base_path = FindClosureBasePath(source_paths + deps)
if not base_path:
return False
PrintLine('// This file was autogenerated by calcdeps.py', out)
excludesSet = set(deps)
for dep in BuildDependenciesFromFiles(source_paths + deps):
if not dep.filename in excludesSet:
PrintLine(GetDepsLine(dep, base_path), out)
return True
def PrintScript(source_paths, out):
for index, dep in enumerate(source_paths):
PrintLine('// Input %d' % index, out)
f = open(dep, 'r')
PrintLine(f.read(), out)
f.close()
def GetJavaVersion():
"""Returns the string for the current version of Java installed."""
proc = subprocess.Popen(['java', '-version'], stderr=subprocess.PIPE)
proc.wait()
  # Decode so the regex also matches the bytes stderr yields under Python 3.
  version_line = proc.stderr.read().decode('utf-8', 'replace').splitlines()[0]
return version_regex.search(version_line).group()
def FilterByExcludes(options, files):
"""Filters the given files by the exlusions specified at the command line.
Args:
options: The flags to calcdeps.
files: The files to filter.
Returns:
A list of files.
"""
excludes = []
if options.excludes:
excludes = ExpandDirectories(options.excludes)
excludesSet = set(excludes)
return [i for i in files if not i in excludesSet]
def GetPathsFromOptions(options):
"""Generates the path files from flag options.
Args:
options: The flags to calcdeps.
Returns:
A list of files in the specified paths. (strings).
"""
search_paths = options.paths
if not search_paths:
search_paths = ['.'] # Add default folder if no path is specified.
search_paths = ExpandDirectories(search_paths)
return FilterByExcludes(options, search_paths)
def GetInputsFromOptions(options):
"""Generates the inputs from flag options.
Args:
options: The flags to calcdeps.
Returns:
A list of inputs (strings).
"""
inputs = options.inputs
if not inputs: # Parse stdin
logging.info('No inputs specified. Reading from stdin...')
inputs = filter(None, [line.strip('\n') for line in sys.stdin.readlines()])
logging.info('Scanning files...')
inputs = ExpandDirectories(inputs)
return FilterByExcludes(options, inputs)
def Compile(compiler_jar_path, source_paths, out, flags=None):
"""Prepares command-line call to Closure compiler.
Args:
compiler_jar_path: Path to the Closure compiler .jar file.
source_paths: Source paths to build, in order.
flags: A list of additional flags to pass on to Closure compiler.
"""
args = ['java', '-jar', compiler_jar_path]
for path in source_paths:
args += ['--js', path]
if flags:
args += flags
logging.info('Compiling with the following command: %s', ' '.join(args))
proc = subprocess.Popen(args, stdout=subprocess.PIPE)
(stdoutdata, stderrdata) = proc.communicate()
if proc.returncode != 0:
logging.error('JavaScript compilation failed.')
sys.exit(1)
else:
out.write(stdoutdata)
def main():
"""The entrypoint for this script."""
logging.basicConfig(format='calcdeps.py: %(message)s', level=logging.INFO)
usage = 'usage: %prog [options] arg'
parser = optparse.OptionParser(usage)
parser.add_option('-i',
'--input',
dest='inputs',
action='append',
help='The inputs to calculate dependencies for. Valid '
'values can be files, directories, or namespaces '
'(ns:goog.net.XhrIo). Only relevant to "list" and '
'"script" output.')
parser.add_option('-p',
'--path',
dest='paths',
action='append',
help='The paths that should be traversed to build the '
'dependencies.')
parser.add_option('-d',
'--dep',
dest='deps',
action='append',
help='Directories or files that should be traversed to '
'find required dependencies for the deps file. '
'Does not generate dependency information for names '
'provided by these files. Only useful in "deps" mode.')
parser.add_option('-e',
'--exclude',
dest='excludes',
action='append',
help='Files or directories to exclude from the --path '
'and --input flags')
parser.add_option('-o',
'--output_mode',
dest='output_mode',
action='store',
default='list',
help='The type of output to generate from this script. '
'Options are "list" for a list of filenames, "script" '
'for a single script containing the contents of all the '
'file, "deps" to generate a deps.js file for all '
'paths, or "compiled" to produce compiled output with '
'the Closure compiler.')
parser.add_option('-c',
'--compiler_jar',
dest='compiler_jar',
action='store',
help='The location of the Closure compiler .jar file.')
parser.add_option('-f',
'--compiler_flag',
                    '--compiler_flags', # for backwards compatibility
dest='compiler_flags',
action='append',
help='Additional flag to pass to the Closure compiler. '
'May be specified multiple times to pass multiple flags.')
parser.add_option('--output_file',
dest='output_file',
action='store',
help=('If specified, write output to this path instead of '
'writing to standard output.'))
(options, args) = parser.parse_args()
search_paths = GetPathsFromOptions(options)
if options.output_file:
out = open(options.output_file, 'w')
else:
out = sys.stdout
if options.output_mode == 'deps':
result = PrintDeps(search_paths, ExpandDirectories(options.deps or []), out)
if not result:
logging.error('Could not find Closure Library in the specified paths')
sys.exit(1)
return
inputs = GetInputsFromOptions(options)
logging.info('Finding Closure dependencies...')
deps = CalculateDependencies(search_paths, inputs)
output_mode = options.output_mode
if output_mode == 'script':
PrintScript(deps, out)
elif output_mode == 'list':
# Just print out a dep per line
for dep in deps:
PrintLine(dep, out)
elif output_mode == 'compiled':
# Make sure a .jar is specified.
if not options.compiler_jar:
logging.error('--compiler_jar flag must be specified if --output is '
'"compiled"')
sys.exit(1)
# User friendly version check.
if distutils and not (distutils.version.LooseVersion(GetJavaVersion()) >
distutils.version.LooseVersion('1.6')):
logging.error('Closure Compiler requires Java 1.6 or higher.')
logging.error('Please visit http://www.java.com/getjava')
sys.exit(1)
Compile(options.compiler_jar, deps, out, options.compiler_flags)
else:
logging.error('Invalid value for --output flag.')
sys.exit(1)
if __name__ == '__main__':
main()
| gpl-3.0 | 7,389,026,041,879,878,000 | -6,186,027,320,706,296,000 | 30.430508 | 80 | 0.652179 | false |
rikiji/gnome-wsname-applet | wsname.py | 1 | 6204 | #!/usr/bin/env python
import pygtk
pygtk.require('2.0')
import gtk
import gnomeapplet
import gobject
import wnck
import sys
import codecs
import random
# debugging
# import os
# new_stdout = open ("/tmp/debug.stdout", "w")
# new_stderr = open ("/tmp/debug.stderr", "w")
# os.dup2(new_stdout.fileno(), sys.stdout.fileno())
# os.dup2(new_stderr.fileno(), sys.stderr.fileno())
class AlignedWindow(gtk.Window):
def __init__(self, widgetToAlignWith):
gtk.Window.__init__(self, gtk.WINDOW_TOPLEVEL)
self.set_decorated(False)
self.widgetToAlignWith = widgetToAlignWith
def positionWindow(self):
# Get our own dimensions & position
self.realize()
gtk.gdk.flush()
#print self.window.get_geometry()
ourWidth = (self.window.get_geometry())[2]
ourHeight = (self.window.get_geometry())[3]
# Skip the taskbar, and the pager, stick and stay on top
self.stick()
# not wrapped self.set_skip_taskbar_hint(True)
# not wrapped self.set_skip_pager_hint(True)
self.set_type_hint (gtk.gdk.WINDOW_TYPE_HINT_DOCK)
# Get the dimensions/position of the widgetToAlignWith
self.widgetToAlignWith.realize()
entryX, entryY = self.widgetToAlignWith.window.get_origin()
entryWidth = (self.widgetToAlignWith.window.get_geometry())[2]
entryHeight = (self.widgetToAlignWith.window.get_geometry())[3]
# Get the screen dimensions
screenHeight = gtk.gdk.screen_height()
screenWidth = gtk.gdk.screen_width()
if entryX + ourWidth < screenWidth:
# Align to the left of the entry
newX = entryX
else:
# Align to the right of the entry
newX = (entryX + entryWidth) - ourWidth
if entryY + entryHeight + ourHeight < screenHeight:
# Align to the bottom of the entry
newY = entryY + entryHeight
else:
newY = entryY - ourHeight
# -"Coordinates locked in captain."
# -"Engage."
self.move(newX, newY)
self.show()
class WSNameEntryWindow(AlignedWindow):
def __init__(self, widget, app):
AlignedWindow.__init__(self, widget)
self.app = app
frame = gtk.Frame()
#frame.set_shadow_type(gtk.SHADOW_OUT)
self.entry = gtk.Entry()
frame.add(self.entry)
self.add(frame)
self.set_default_size(0,0)
self.entry.connect("activate", self._on_activate)
self.entry.connect("key-release-event", self._on_key_release)
self.entry.connect("leave-notify-event", self._on_activate)
def _on_activate(self, event):
self.app.workspace.change_name(self.entry.get_text())
self.entryvisible = False
self.hide()
def _on_key_release(self, widget, event):
if event.keyval == gtk.keysyms.Escape:
self.app.entryvisible = False
            self.hide()  # this window is the entry window itself
class WSNameApplet(gnomeapplet.Applet):
_name_change_handler_id = None
workspace = None
entryvisible = False
def __init__(self,applet,iid):
self.applet = applet
self.menu = gtk.MenuBar()
self.menuitem = gtk.MenuItem()
self.label = gtk.Label()
self.label.set_size_request(120,-1)
self.menuitem.connect("button-press-event", self._on_button_press)
self.applet.connect("change-background", self._on_change_background)
self.applet.add(self.menu)
self.menu.add(self.menuitem)
self.menuitem.add(self.label)
self.screen = wnck.screen_get_default()
self.screen.connect("active_workspace_changed", self._on_workspace_changed)
self.entry_window = WSNameEntryWindow(self.applet, self)
self.workspace = really_get_active_workspace(self.screen)
self.show_workspace_name()
self._name_change_handler_id = None
def toggle_entry(self):
if self.entryvisible == True:
self.entryvisible = False
self.entry_window.hide()
else:
self.entryvisible = True
self.entry_window.positionWindow()
self.entry_window.show_all()
self.entry_window.present()
self.entry_window.entry.set_text(self.workspace.get_name())
gobject.timeout_add(0, self.entry_window.entry.grab_focus)
def _on_button_press(self, menuitem, event):
if event.button != 1:
menuitem.stop_emission("button-press-event")
self.toggle_entry()
def _on_workspace_changed(self, event, old_workspace):
if (self._name_change_handler_id):
self.workspace.disconnect(self._name_change_handler_id)
self.workspace = really_get_active_workspace(self.screen)
self._name_change_handler_id = self.workspace.connect("name-changed", self._on_workspace_name_changed)
self.show_workspace_name()
def _on_workspace_name_changed(self, event):
self.show_workspace_name()
def show_workspace_name(self):
self.label.set_text(self.workspace.get_name())
self.applet.show_all()
def _on_change_background(self, applet, type, color, pixmap):
applet.set_style(None)
rc_style = gtk.RcStyle()
applet.modify_style(rc_style)
if (type == gnomeapplet.COLOR_BACKGROUND):
applet.modify_bg(gtk.STATE_NORMAL, color)
elif (type == gnomeapplet.PIXMAP_BACKGROUND):
style = applet.style
style.bg_pixmap[gtk.STATE_NORMAL] = pixmap
self.applet.set_style(style)
def really_get_active_workspace(screen):
# This bit is needed because wnck is asynchronous.
while gtk.events_pending():
gtk.main_iteration()
return screen.get_active_workspace()
gobject.type_register(WSNameApplet)
def app_factory(applet,iid):
return WSNameApplet(applet,iid)
if len(sys.argv) == 2:
main_window = gtk.Window(gtk.WINDOW_TOPLEVEL)
main_window.connect("destroy", gtk.main_quit)
main_window.set_title("Applet")
app = gnomeapplet.Applet()
app_factory(app,None)
app.reparent(main_window)
main_window.show_all()
gtk.main()
sys.exit()
gnomeapplet.bonobo_factory("OAFIID:GNOME_wsname_Factory", WSNameApplet.__gtype__, "ws-name", "0", app_factory)
| gpl-2.0 | -2,272,103,988,872,025,900 | -1,951,827,869,599,239,200 | 30.979381 | 110 | 0.644262 | false |
robin900/sqlalchemy | test/ext/test_indexable.py | 2 | 10585 | from sqlalchemy.testing import assert_raises
import sqlalchemy as sa
from sqlalchemy import testing
from sqlalchemy import Integer, Text
from sqlalchemy.sql.sqltypes import ARRAY, JSON
from sqlalchemy.testing.schema import Column
from sqlalchemy.orm import Session
from sqlalchemy.testing import fixtures
from sqlalchemy.ext.indexable import index_property
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.testing import eq_, ne_, is_, in_, not_in_
from sqlalchemy import inspect
class IndexPropertyTest(fixtures.TestBase):
def test_array(self):
Base = declarative_base()
class A(Base):
__tablename__ = 'a'
id = Column('id', Integer, primary_key=True)
array = Column('_array', ARRAY(Integer),
default=[])
first = index_property('array', 0)
tenth = index_property('array', 9)
a = A(array=[1, 2, 3])
eq_(a.first, 1)
assert_raises(AttributeError, lambda: a.tenth)
a.first = 100
eq_(a.first, 100)
eq_(a.array, [100, 2, 3])
del a.first
eq_(a.first, 2)
a2 = A(first=5)
eq_(a2.first, 5)
eq_(a2.array, [5])
def test_array_longinit(self):
Base = declarative_base()
class A(Base):
__tablename__ = 'a'
id = Column('id', Integer, primary_key=True)
array = Column('_array', ARRAY(Integer),
default=[])
first = index_property('array', 0)
fifth = index_property('array', 4)
a1 = A(fifth=10)
a2 = A(first=5)
eq_(a1.array, [None, None, None, None, 10])
eq_(a2.array, [5])
assert_raises(IndexError, setattr, a2, "fifth", 10)
def test_json(self):
Base = declarative_base()
class J(Base):
__tablename__ = 'j'
id = Column('id', Integer, primary_key=True)
json = Column('_json', JSON, default={})
field = index_property('json', 'field')
j = J(json={'a': 1, 'b': 2})
assert_raises(AttributeError, lambda: j.field)
j.field = 'test'
eq_(j.field, 'test')
eq_(j.json, {'a': 1, 'b': 2, 'field': 'test'})
j2 = J(field='test')
eq_(j2.json, {"field": "test"})
eq_(j2.field, "test")
def test_value_is_none_attributeerror(self):
Base = declarative_base()
class A(Base):
__tablename__ = 'a'
id = Column('id', Integer, primary_key=True)
array = Column('_array', ARRAY(Integer))
first = index_property('array', 1)
a = A()
assert_raises(AttributeError, getattr, a, "first")
assert_raises(AttributeError, delattr, a, "first")
def test_get_attribute_error(self):
Base = declarative_base()
class A(Base):
__tablename__ = 'a'
id = Column('id', Integer, primary_key=True)
array = Column('_array', ARRAY(Integer))
first = index_property('array', 1)
a = A(array=[])
assert_raises(AttributeError, lambda: a.first)
def test_set_immutable(self):
Base = declarative_base()
class A(Base):
__tablename__ = 'a'
id = Column(Integer, primary_key=True)
array = Column(ARRAY(Integer))
first = index_property('array', 1, mutable=False)
a = A()
def set_():
a.first = 10
assert_raises(AttributeError, set_)
def test_set_mutable_dict(self):
Base = declarative_base()
class J(Base):
__tablename__ = 'j'
id = Column(Integer, primary_key=True)
json = Column(JSON, default={})
field = index_property('json', 'field')
j = J()
j.field = 10
j.json = {}
assert_raises(AttributeError, lambda: j.field)
assert_raises(AttributeError, delattr, j, "field")
j.field = 10
eq_(j.field, 10)
def test_get_default_value(self):
Base = declarative_base()
class J(Base):
__tablename__ = 'j'
id = Column(Integer, primary_key=True)
json = Column(JSON, default={})
default = index_property('json', 'field', default='default')
none = index_property('json', 'field', default=None)
j = J()
assert j.json is None
assert j.default == 'default'
assert j.none is None
j.json = {}
assert j.default == 'default'
assert j.none is None
j.default = None
assert j.default is None
assert j.none is None
j.none = 10
        assert j.default == 10
assert j.none == 10
class IndexPropertyArrayTest(fixtures.DeclarativeMappedTest):
__requires__ = ('array_type',)
__backend__ = True
@classmethod
def setup_classes(cls):
Base = cls.DeclarativeBasic
class Array(fixtures.ComparableEntity, Base):
__tablename__ = "array"
id = Column(sa.Integer, primary_key=True,
test_needs_autoincrement=True)
array = Column(ARRAY(Integer), default=[])
array0 = Column(ARRAY(Integer, zero_indexes=True), default=[])
first = index_property('array', 0)
first0 = index_property('array0', 0, onebased=False)
def test_query(self):
Array = self.classes.Array
s = Session(testing.db)
s.add_all([
Array(),
Array(array=[1, 2, 3], array0=[1, 2, 3]),
Array(array=[4, 5, 6], array0=[4, 5, 6])])
s.commit()
a1 = s.query(Array).filter(Array.array == [1, 2, 3]).one()
a2 = s.query(Array).filter(Array.first == 1).one()
eq_(a1.id, a2.id)
a3 = s.query(Array).filter(Array.first == 4).one()
ne_(a1.id, a3.id)
a4 = s.query(Array).filter(Array.first0 == 1).one()
eq_(a1.id, a4.id)
a5 = s.query(Array).filter(Array.first0 == 4).one()
ne_(a1.id, a5.id)
def test_mutable(self):
Array = self.classes.Array
s = Session(testing.db)
a = Array(array=[1, 2, 3])
s.add(a)
s.commit()
a.first = 42
eq_(a.first, 42)
s.commit()
eq_(a.first, 42)
del a.first
eq_(a.first, 2)
s.commit()
eq_(a.first, 2)
def test_modified(self):
from sqlalchemy import inspect
Array = self.classes.Array
s = Session(testing.db)
a = Array(array=[1, 2, 3])
s.add(a)
s.commit()
i = inspect(a)
is_(i.modified, False)
in_('array', i.unmodified)
a.first = 10
is_(i.modified, True)
not_in_('array', i.unmodified)
class IndexPropertyJsonTest(fixtures.DeclarativeMappedTest):
# TODO: remove reliance on "astext" for these tests
__requires__ = ('json_type',)
__only_on__ = 'postgresql'
__backend__ = True
@classmethod
def setup_classes(cls):
from sqlalchemy.dialects.postgresql import JSON
Base = cls.DeclarativeBasic
class json_property(index_property):
def __init__(self, attr_name, index, cast_type):
super(json_property, self).__init__(attr_name, index)
self.cast_type = cast_type
def expr(self, model):
expr = super(json_property, self).expr(model)
return expr.astext.cast(self.cast_type)
class Json(fixtures.ComparableEntity, Base):
__tablename__ = "json"
id = Column(sa.Integer, primary_key=True,
test_needs_autoincrement=True)
json = Column(JSON, default={})
field = index_property('json', 'field')
json_field = index_property('json', 'field')
int_field = json_property('json', 'field', Integer)
text_field = json_property('json', 'field', Text)
other = index_property('json', 'other')
subfield = json_property('other', 'field', Text)
def test_query(self):
Json = self.classes.Json
s = Session(testing.db)
s.add_all([
Json(),
Json(json={'field': 10}),
Json(json={'field': 20})])
s.commit()
a1 = s.query(Json).filter(Json.json['field'].astext.cast(Integer) == 10)\
.one()
a2 = s.query(Json).filter(Json.field.astext == '10').one()
eq_(a1.id, a2.id)
a3 = s.query(Json).filter(Json.field.astext == '20').one()
ne_(a1.id, a3.id)
a4 = s.query(Json).filter(Json.json_field.astext == '10').one()
eq_(a2.id, a4.id)
a5 = s.query(Json).filter(Json.int_field == 10).one()
eq_(a2.id, a5.id)
a6 = s.query(Json).filter(Json.text_field == '10').one()
eq_(a2.id, a6.id)
def test_mutable(self):
Json = self.classes.Json
s = Session(testing.db)
j = Json(json={})
s.add(j)
s.commit()
j.other = 42
eq_(j.other, 42)
s.commit()
eq_(j.other, 42)
def test_modified(self):
Json = self.classes.Json
s = Session(testing.db)
j = Json(json={})
s.add(j)
s.commit()
i = inspect(j)
is_(i.modified, False)
in_('json', i.unmodified)
j.other = 42
is_(i.modified, True)
not_in_('json', i.unmodified)
def test_cast_type(self):
Json = self.classes.Json
s = Session(testing.db)
j = Json(json={'field': 10})
s.add(j)
s.commit()
jq = s.query(Json).filter(Json.int_field == 10).one()
eq_(j.id, jq.id)
jq = s.query(Json).filter(Json.text_field == '10').one()
eq_(j.id, jq.id)
jq = s.query(Json).filter(Json.json_field.astext == '10').one()
eq_(j.id, jq.id)
jq = s.query(Json).filter(Json.text_field == 'wrong').first()
is_(jq, None)
j.json = {'field': True}
s.commit()
jq = s.query(Json).filter(Json.text_field == 'true').one()
eq_(j.id, jq.id)
def test_multi_dimension(self):
Json = self.classes.Json
s = Session(testing.db)
j = Json(json={'other': {'field': 'multi'}})
s.add(j)
s.commit()
eq_(j.other, {'field': 'multi'})
eq_(j.subfield, 'multi')
jq = s.query(Json).filter(Json.subfield == 'multi').first()
eq_(j.id, jq.id)
| mit | 1,918,893,535,955,457,800 | 5,207,822,087,940,005,000 | 27.302139 | 81 | 0.525083 | false |
brianwoo/django-tutorial | build/Django/django/contrib/auth/forms.py | 93 | 13799 | from __future__ import unicode_literals
from collections import OrderedDict
from django import forms
from django.contrib.auth import authenticate, get_user_model
from django.contrib.auth.hashers import (
UNUSABLE_PASSWORD_PREFIX, identify_hasher,
)
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import EmailMultiAlternatives
from django.forms.utils import flatatt
from django.template import loader
from django.utils.encoding import force_bytes
from django.utils.html import format_html, format_html_join
from django.utils.http import urlsafe_base64_encode
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.translation import ugettext, ugettext_lazy as _
class ReadOnlyPasswordHashWidget(forms.Widget):
def render(self, name, value, attrs):
encoded = value
final_attrs = self.build_attrs(attrs)
if not encoded or encoded.startswith(UNUSABLE_PASSWORD_PREFIX):
summary = mark_safe("<strong>%s</strong>" % ugettext("No password set."))
else:
try:
hasher = identify_hasher(encoded)
except ValueError:
summary = mark_safe("<strong>%s</strong>" % ugettext(
"Invalid password format or unknown hashing algorithm."))
else:
summary = format_html_join('',
"<strong>{}</strong>: {} ",
((ugettext(key), value)
for key, value in hasher.safe_summary(encoded).items())
)
return format_html("<div{}>{}</div>", flatatt(final_attrs), summary)
class ReadOnlyPasswordHashField(forms.Field):
widget = ReadOnlyPasswordHashWidget
def __init__(self, *args, **kwargs):
kwargs.setdefault("required", False)
super(ReadOnlyPasswordHashField, self).__init__(*args, **kwargs)
def bound_data(self, data, initial):
# Always return initial because the widget doesn't
# render an input field.
return initial
def has_changed(self, initial, data):
return False
class UserCreationForm(forms.ModelForm):
"""
A form that creates a user, with no privileges, from the given username and
password.
"""
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
}
password1 = forms.CharField(label=_("Password"),
widget=forms.PasswordInput)
password2 = forms.CharField(label=_("Password confirmation"),
widget=forms.PasswordInput,
help_text=_("Enter the same password as above, for verification."))
class Meta:
model = User
fields = ("username",)
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
return password2
def save(self, commit=True):
user = super(UserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
password = ReadOnlyPasswordHashField(label=_("Password"),
help_text=_("Raw passwords are not stored, so there is no way to see "
"this user's password, but you can change the password "
"using <a href=\"password/\">this form</a>."))
class Meta:
model = User
fields = '__all__'
def __init__(self, *args, **kwargs):
super(UserChangeForm, self).__init__(*args, **kwargs)
f = self.fields.get('user_permissions', None)
if f is not None:
f.queryset = f.queryset.select_related('content_type')
def clean_password(self):
# Regardless of what the user provides, return the initial value.
# This is done here, rather than on the field, because the
# field does not have access to the initial value
return self.initial["password"]
class AuthenticationForm(forms.Form):
"""
Base class for authenticating users. Extend this to get a form that accepts
username/password logins.
"""
username = forms.CharField(max_length=254)
password = forms.CharField(label=_("Password"), widget=forms.PasswordInput)
error_messages = {
'invalid_login': _("Please enter a correct %(username)s and password. "
"Note that both fields may be case-sensitive."),
'inactive': _("This account is inactive."),
}
def __init__(self, request=None, *args, **kwargs):
"""
The 'request' parameter is set for custom auth use by subclasses.
The form data comes in via the standard 'data' kwarg.
"""
self.request = request
self.user_cache = None
super(AuthenticationForm, self).__init__(*args, **kwargs)
# Set the label for the "username" field.
UserModel = get_user_model()
self.username_field = UserModel._meta.get_field(UserModel.USERNAME_FIELD)
if self.fields['username'].label is None:
self.fields['username'].label = capfirst(self.username_field.verbose_name)
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
if username and password:
self.user_cache = authenticate(username=username,
password=password)
if self.user_cache is None:
raise forms.ValidationError(
self.error_messages['invalid_login'],
code='invalid_login',
params={'username': self.username_field.verbose_name},
)
else:
self.confirm_login_allowed(self.user_cache)
return self.cleaned_data
def confirm_login_allowed(self, user):
"""
Controls whether the given User may log in. This is a policy setting,
independent of end-user authentication. This default behavior is to
allow login by active users, and reject login by inactive users.
If the given user cannot log in, this method should raise a
``forms.ValidationError``.
If the given user may log in, this method should return None.
"""
if not user.is_active:
raise forms.ValidationError(
self.error_messages['inactive'],
code='inactive',
)
def get_user_id(self):
if self.user_cache:
return self.user_cache.id
return None
def get_user(self):
return self.user_cache
class PasswordResetForm(forms.Form):
email = forms.EmailField(label=_("Email"), max_length=254)
def send_mail(self, subject_template_name, email_template_name,
context, from_email, to_email, html_email_template_name=None):
"""
Sends a django.core.mail.EmailMultiAlternatives to `to_email`.
"""
subject = loader.render_to_string(subject_template_name, context)
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
body = loader.render_to_string(email_template_name, context)
email_message = EmailMultiAlternatives(subject, body, from_email, [to_email])
if html_email_template_name is not None:
html_email = loader.render_to_string(html_email_template_name, context)
email_message.attach_alternative(html_email, 'text/html')
email_message.send()
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This allows subclasses to more easily customize the default policies
that prevent inactive users and users with unusable passwords from
resetting their password.
"""
active_users = get_user_model()._default_manager.filter(
email__iexact=email, is_active=True)
return (u for u in active_users if u.has_usable_password())
def save(self, domain_override=None,
subject_template_name='registration/password_reset_subject.txt',
email_template_name='registration/password_reset_email.html',
use_https=False, token_generator=default_token_generator,
from_email=None, request=None, html_email_template_name=None):
"""
Generates a one-use only link for resetting password and sends to the
user.
"""
email = self.cleaned_data["email"]
for user in self.get_users(email):
if not domain_override:
current_site = get_current_site(request)
site_name = current_site.name
domain = current_site.domain
else:
site_name = domain = domain_override
context = {
'email': user.email,
'domain': domain,
'site_name': site_name,
'uid': urlsafe_base64_encode(force_bytes(user.pk)),
'user': user,
'token': token_generator.make_token(user),
'protocol': 'https' if use_https else 'http',
}
self.send_mail(subject_template_name, email_template_name,
context, from_email, user.email,
html_email_template_name=html_email_template_name)
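# A minimal usage sketch (illustrative, not part of this module): inside a
# view, assuming `request` carries the submitted POST data,
#
#     form = PasswordResetForm(request.POST)
#     if form.is_valid():
#         form.save(request=request, use_https=request.is_secure())
#
# renders the registration/password_reset_* templates and emails a one-use
# reset link to every matching active user.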
class SetPasswordForm(forms.Form):
"""
    A form that lets a user set their password without entering the old
    password.
"""
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
}
new_password1 = forms.CharField(label=_("New password"),
widget=forms.PasswordInput)
new_password2 = forms.CharField(label=_("New password confirmation"),
widget=forms.PasswordInput)
def __init__(self, user, *args, **kwargs):
self.user = user
super(SetPasswordForm, self).__init__(*args, **kwargs)
def clean_new_password2(self):
password1 = self.cleaned_data.get('new_password1')
password2 = self.cleaned_data.get('new_password2')
if password1 and password2:
if password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
return password2
def save(self, commit=True):
self.user.set_password(self.cleaned_data['new_password1'])
if commit:
self.user.save()
return self.user
class PasswordChangeForm(SetPasswordForm):
"""
A form that lets a user change their password by entering their old
password.
"""
error_messages = dict(SetPasswordForm.error_messages, **{
'password_incorrect': _("Your old password was entered incorrectly. "
"Please enter it again."),
})
old_password = forms.CharField(label=_("Old password"),
widget=forms.PasswordInput)
def clean_old_password(self):
"""
Validates that the old_password field is correct.
"""
old_password = self.cleaned_data["old_password"]
if not self.user.check_password(old_password):
raise forms.ValidationError(
self.error_messages['password_incorrect'],
code='password_incorrect',
)
return old_password
PasswordChangeForm.base_fields = OrderedDict(
(k, PasswordChangeForm.base_fields[k])
for k in ['old_password', 'new_password1', 'new_password2']
)
class AdminPasswordChangeForm(forms.Form):
"""
A form used to change the password of a user in the admin interface.
"""
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
}
required_css_class = 'required'
password1 = forms.CharField(
label=_("Password"),
widget=forms.PasswordInput,
)
password2 = forms.CharField(
label=_("Password (again)"),
widget=forms.PasswordInput,
help_text=_("Enter the same password as above, for verification."),
)
def __init__(self, user, *args, **kwargs):
self.user = user
super(AdminPasswordChangeForm, self).__init__(*args, **kwargs)
def clean_password2(self):
password1 = self.cleaned_data.get('password1')
password2 = self.cleaned_data.get('password2')
if password1 and password2:
if password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
return password2
def save(self, commit=True):
"""
Saves the new password.
"""
self.user.set_password(self.cleaned_data["password1"])
if commit:
self.user.save()
return self.user
def _get_changed_data(self):
data = super(AdminPasswordChangeForm, self).changed_data
for name in self.fields.keys():
if name not in data:
return []
return ['password']
changed_data = property(_get_changed_data)
| gpl-3.0 | -7,582,109,196,468,365,000 | 4,911,144,645,030,418,000 | 36.094086 | 99 | 0.602797 | false |
smalls257/VRvisu | Library/External.LCA_RESTRICTED/Languages/CPython/27/Lib/ctypes/test/test_struct_fields.py | 264 | 1503 | import unittest
from ctypes import *
class StructFieldsTestCase(unittest.TestCase):
# Structure/Union classes must get 'finalized' sooner or
# later, when one of these things happen:
#
# 1. _fields_ is set.
# 2. An instance is created.
# 3. The type is used as field of another Structure/Union.
# 4. The type is subclassed
#
# When they are finalized, assigning _fields_ is no longer allowed.
def test_1_A(self):
class X(Structure):
pass
self.assertEqual(sizeof(X), 0) # not finalized
X._fields_ = [] # finalized
self.assertRaises(AttributeError, setattr, X, "_fields_", [])
def test_1_B(self):
class X(Structure):
_fields_ = [] # finalized
self.assertRaises(AttributeError, setattr, X, "_fields_", [])
def test_2(self):
class X(Structure):
pass
X()
self.assertRaises(AttributeError, setattr, X, "_fields_", [])
def test_3(self):
class X(Structure):
pass
class Y(Structure):
_fields_ = [("x", X)] # finalizes X
self.assertRaises(AttributeError, setattr, X, "_fields_", [])
def test_4(self):
class X(Structure):
pass
class Y(X):
pass
self.assertRaises(AttributeError, setattr, X, "_fields_", [])
Y._fields_ = []
self.assertRaises(AttributeError, setattr, X, "_fields_", [])
if __name__ == "__main__":
unittest.main()
| gpl-3.0 | 7,507,081,169,877,823,000 | 8,175,442,561,825,722,000 | 29.06 | 71 | 0.566866 | false |
dimroc/tensorflow-mnist-tutorial | lib/python3.6/site-packages/tensorflow/contrib/labeled_tensor/python/ops/_typecheck.py | 52 | 10165 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Minimal runtime type checking library.
This module should not be considered public API.
"""
# TODO(ericmc,shoyer): Delete this in favor of using pytype or mypy
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import inspect
import re
# used for register_type_abbreviation and _type_repr below.
_TYPE_ABBREVIATIONS = {}
class Type(object):
"""Base class for type checker types.
The custom types defined in this module are based on types in the standard
library's typing module (in Python 3.5):
https://docs.python.org/3/library/typing.html
The only difference should be that we use actual instances of Type classes to
represent custom types rather than the metaclass magic typing uses to create
new class objects. In practice, all this should mean is that we use
`List(int)` rather than `List[int]`.
Custom types should implement __instancecheck__ and inherit from Type. Every
argument in the constructor must be a type or Type instance, and these
arguments must be stored as a tuple on the `_types` attribute.
"""
def __init__(self, *types):
self._types = types
def __repr__(self):
args_repr = ", ".join(repr(t) for t in self._types)
return "typecheck.%s(%s)" % (type(self).__name__, args_repr)
class _SingleArgumentType(Type):
"""Use this subclass for parametric types that accept only one argument."""
def __init__(self, tpe):
super(_SingleArgumentType, self).__init__(tpe)
@property
def _type(self):
tpe, = self._types # pylint: disable=unbalanced-tuple-unpacking
return tpe
class _TwoArgumentType(Type):
"""Use this subclass for parametric types that accept two arguments."""
def __init__(self, first_type, second_type):
super(_TwoArgumentType, self).__init__(first_type, second_type)
class Union(Type):
"""A sum type.
A correct type is any of the types provided.
"""
def __instancecheck__(self, instance):
return isinstance(instance, self._types)
class Optional(_SingleArgumentType):
"""An optional type.
A correct type is either the provided type or NoneType.
"""
def __instancecheck__(self, instance):
# types.NoneType does not exist in Python 3
return isinstance(instance, (self._type, type(None)))
class List(_SingleArgumentType):
"""A typed list.
A correct type is a list where each element has the single provided type.
"""
def __instancecheck__(self, instance):
return (isinstance(instance, list)
and all(isinstance(x, self._type) for x in instance))
class Sequence(_SingleArgumentType):
"""A typed sequence.
A correct type is a sequence where each element has the single provided type.
"""
def __instancecheck__(self, instance):
return (isinstance(instance, collections.Sequence)
and all(isinstance(x, self._type) for x in instance))
class Collection(_SingleArgumentType):
"""A sized, iterable container.
A correct type is an iterable and container with known size where each element
has the single provided type.
We use this in preference to Iterable because we check each instance of the
iterable at runtime, and hence need to avoid iterables that could be
exhausted.
"""
def __instancecheck__(self, instance):
return (isinstance(instance, collections.Iterable)
and isinstance(instance, collections.Sized)
and isinstance(instance, collections.Container)
and all(isinstance(x, self._type) for x in instance))
class Tuple(Type):
"""A typed tuple.
A correct type is a tuple with the correct length where each element has
the correct type.
"""
def __instancecheck__(self, instance):
return (isinstance(instance, tuple)
and len(instance) == len(self._types)
and all(isinstance(x, t) for x, t in zip(instance, self._types)))
class Mapping(_TwoArgumentType):
"""A typed mapping.
A correct type has the correct parametric types for keys and values.
"""
def __instancecheck__(self, instance):
key_type, value_type = self._types # pylint: disable=unbalanced-tuple-unpacking
return (isinstance(instance, collections.Mapping)
and all(isinstance(k, key_type) for k in instance.keys())
and all(isinstance(k, value_type) for k in instance.values()))
class Dict(Mapping):
"""A typed dict.
A correct type has the correct parametric types for keys and values.
"""
def __instancecheck__(self, instance):
return (isinstance(instance, dict)
and super(Dict, self).__instancecheck__(instance))
def _replace_forward_references(t, context):
"""Replace forward references in the given type."""
if isinstance(t, str):
return context[t]
elif isinstance(t, Type):
return type(t)(*[_replace_forward_references(t, context) for t in t._types]) # pylint: disable=protected-access
else:
return t
def register_type_abbreviation(name, alias):
"""Register an abbreviation for a type in typecheck tracebacks.
This makes otherwise very long typecheck errors much more readable.
Example:
typecheck.register_type_abbreviation(tf.Dimension, 'tf.Dimension')
Args:
name: type or class to abbreviate.
alias: string alias to substitute.
"""
_TYPE_ABBREVIATIONS[name] = alias
def _type_repr(t):
"""A more succinct repr for typecheck tracebacks."""
string = repr(t)
for type_, alias in _TYPE_ABBREVIATIONS.items():
string = string.replace(repr(type_), alias)
string = re.sub(r"<(class|type) '([\w.]+)'>", r"\2", string)
string = re.sub(r"typecheck\.(\w+)", r"\1", string)
return string
class Error(TypeError):
"""Exception for typecheck failures."""
def accepts(*types):
"""A decorator which checks the input types of a function.
Based on:
http://stackoverflow.com/questions/15299878/how-to-use-python-decorators-to-check-function-arguments
The above draws from:
https://www.python.org/dev/peps/pep-0318/
Args:
*types: A list of Python types.
Returns:
A function to use as a decorator.
"""
def check_accepts(f):
"""Check the types."""
spec = inspect.getargspec(f)
num_function_arguments = len(spec.args)
if len(types) != num_function_arguments:
raise Error(
"Function %r has %d arguments but only %d types were provided in the "
"annotation." % (f, num_function_arguments, len(types)))
if spec.defaults:
num_defaults = len(spec.defaults)
for (name, a, t) in zip(spec.args[-num_defaults:],
spec.defaults,
types[-num_defaults:]):
allowed_type = _replace_forward_references(t, f.__globals__)
if not isinstance(a, allowed_type):
raise Error("default argument value %r of type %r is not an instance "
"of the allowed type %s for the %s argument to %r"
% (a, type(a), _type_repr(allowed_type), name, f))
@functools.wraps(f)
def new_f(*args, **kwds):
"""A helper function."""
for (a, t) in zip(args, types):
allowed_type = _replace_forward_references(t, f.__globals__)
if not isinstance(a, allowed_type):
raise Error("%r of type %r is not an instance of the allowed type %s "
"for %r" % (a, type(a), _type_repr(allowed_type), f))
return f(*args, **kwds)
return new_f
return check_accepts
def returns(*types):
"""A decorator which checks the return types of a function.
Based on:
http://stackoverflow.com/questions/15299878/how-to-use-python-decorators-to-check-function-arguments
The above draws from:
https://www.python.org/dev/peps/pep-0318/
Args:
*types: A list of Python types.
A list of one element corresponds to a single return value.
A list of several elements corresponds to several return values.
Note that a function with no explicit return value has an implicit
NoneType return and should be annotated correspondingly.
Returns:
A function to use as a decorator.
"""
def check_returns(f):
"""Check the types."""
if not types:
raise TypeError("A return type annotation must contain at least one type")
@functools.wraps(f)
def new_f(*args, **kwds):
"""A helper function."""
return_value = f(*args, **kwds)
if len(types) == 1:
# The function has a single return value.
allowed_type = _replace_forward_references(types[0], f.__globals__)
if not isinstance(return_value, allowed_type):
raise Error("%r of type %r is not an instance of the allowed type %s "
"for %r"
% (return_value, type(return_value),
_type_repr(allowed_type), f))
else:
if len(return_value) != len(types):
raise Error(
"Function %r has %d return values but only %d types were "
"provided in the annotation." %
(f, len(return_value), len(types)))
for (r, t) in zip(return_value, types):
allowed_type = _replace_forward_references(t, f.__globals__)
if not isinstance(r, allowed_type):
raise Error("%r of type %r is not an instance of allowed type %s "
"for %r" % (r, type(r), _type_repr(allowed_type), f))
return return_value
return new_f
return check_returns
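# A minimal usage sketch (illustrative, not part of this module):
#
#   @accepts(int, List(str))
#   @returns(bool)
#   def all_named(count, names):
#     return count == len(names)
#
# all_named(2, ['a', 'b']) passes both checks, while all_named('2', ['a'])
# raises Error because '2' is not an int.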
| apache-2.0 | -8,318,946,204,424,049,000 | 847,175,620,816,461,000 | 30.568323 | 116 | 0.658042 | false |
python-ning/blog | blogchen/blog/migrations/0006_auto_20150831_2257.py | 1 | 1727 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('blog', '0005_auto_20150830_2306'),
]
operations = [
migrations.AddField(
model_name='comment',
name='email',
field=models.EmailField(max_length=50, null=True, verbose_name=b'\xe9\x82\xae\xe7\xae\xb1\xe5\x9c\xb0\xe5\x9d\x80', blank=True),
),
migrations.AddField(
model_name='comment',
name='url',
field=models.URLField(max_length=100, null=True, verbose_name=b'\xe4\xb8\xaa\xe4\xba\xba\xe7\xbd\x91\xe9\xa1\xb5\xe5\x9c\xb0\xe5\x9d\x80', blank=True),
),
migrations.AddField(
model_name='comment',
name='user',
field=models.ForeignKey(verbose_name=b'\xe7\x94\xa8\xe6\x88\xb7', blank=True, to=settings.AUTH_USER_MODEL, null=True),
),
migrations.AddField(
model_name='comment',
name='username',
field=models.CharField(max_length=30, null=True, verbose_name=b'\xe7\x94\xa8\xe6\x88\xb7\xe5\x90\x8d', blank=True),
),
migrations.AddField(
model_name='user',
name='url',
field=models.URLField(max_length=100, null=True, verbose_name=b'\xe4\xb8\xaa\xe4\xba\xba\xe7\xbd\x91\xe9\xa1\xb5\xe5\x9c\xb0\xe5\x9d\x80', blank=True),
),
migrations.AlterField(
model_name='article',
name='is_recommend',
field=models.BooleanField(default=1, verbose_name=b'\xe6\x98\xaf\xe5\x90\xa6\xe6\x8e\xa8\xe8\x8d\x90'),
),
]
| mpl-2.0 | -345,721,750,553,374,850 | 3,152,745,198,638,268,000 | 37.377778 | 163 | 0.593515 | false |
alisw/AliPhysics | PWGMM/MC/aligenqa/aligenqa/utils.py | 37 | 7697 | import os
import random
import string
import subprocess
import re
from rootpy import asrootpy
from rootpy.plotting import Graph
def gen_random_name():
"""Generate a random name for temp hists"""
return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(25))
def get_est_dirs(sums, considered_ests):
return (somedir for somedir in sums if somedir.GetName() in considered_ests)
def make_estimator_title(name):
if name == 'EtaLt05':
return '|#eta|#leq0.5'
elif name == 'EtaLt08':
return '|#eta|#leq0.8'
elif name == 'EtaLt15':
return '|#eta|#leq1.5'
elif name == 'Eta08_15':
return '0.8#leq|#eta|#leq1.5'
else:
return name
def remap_x_values(hist, corr_hist):
"""
    Map the x values of hist to the y values of corr_hist.
    In order to do so, it is necessary that the x values of hist are also present as x-values in corr_hist.
Parameters
----------
hist : Hist1D
corr_hist : Hist2D
Correlations between the quantity on hist's x-axis (also corr_hist's xaxis) and the new
quantity to plot agains (on corr_hist's y-axis.
Returns
-------
Graph
Graph of the remapped hist. Errors are ??? TODO
"""
hist = asrootpy(hist)
corr_hist = asrootpy(corr_hist)
profx = asrootpy(corr_hist.ProfileX(gen_random_name()))
rt_graph = Graph()
for i, (nch_ref_bin, counter_bin) in enumerate(zip(profx.bins(), hist.bins())):
rt_graph.SetPoint(i, nch_ref_bin.value, counter_bin.value)
xerr, yerr = nch_ref_bin.error / 2.0, counter_bin.error / 2.0
rt_graph.SetPointError(i, xerr, xerr, yerr, yerr)
return rt_graph
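# Illustrative sketch with hypothetical inputs: if `hist` counts some particle
# yield per reference-estimator bin and `corr_hist` correlates that reference
# estimator (x) with another multiplicity estimator (y), the returned graph
# re-plots the yield against the ProfileX of the second estimator.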
def remove_zero_value_points(g):
# Remove the points backwards, since the index would change if we do it forwards
# The first point has index 0!
points_to_remove = []
for i, (x, y) in enumerate(g):
if not y > 0.0:
points_to_remove.append(i)
for p in points_to_remove[::-1]:
g.RemovePoint(p)
def remove_points_with_equal_x(g):
"""Remove all points which are on already occupied x values. Ie. The first point is kept, all later ones removed"""
points_to_remove = []
seen_x = []
for i, (x, y) in enumerate(g):
if x in seen_x:
points_to_remove.append(i)
else:
seen_x.append(x)
for p in points_to_remove[::-1]:
g.RemovePoint(p)
def remove_points_with_x_err_gt_1NchRef(g):
npoints = g.GetN()
points_to_remove = []
for idx in xrange(0, npoints):
if g.GetErrorX(idx) > 1:
points_to_remove.append(idx)
for p in points_to_remove[::-1]:
g.RemovePoint(p)
def remove_non_mutual_points(g1, g2):
"""Remove all points with do no have a corresponding point at the same x-value in the other hist"""
points_to_remove1 = []
points_to_remove2 = []
xs1 = [p[0] for p in g1]
xs2 = [p[0] for p in g2]
for i, x in enumerate(xs1):
if x not in xs2:
points_to_remove1.append(i)
for i, x in enumerate(xs2):
if x not in xs1:
points_to_remove2.append(i)
for p in points_to_remove1[::-1]:
g1.RemovePoint(p)
for p in points_to_remove2[::-1]:
g2.RemovePoint(p)
def percentile_bin_to_binidx_bin(percentile_bin, event_counter):
"""
Converts a given percentile interval (eg. (.5, .4)) to an interval of bin numbers of the given
event_counter histogram.
Parameters
----------
percentile_bin : tuple
        Two percentiles, each within 0-1. Needs to be decreasing.
event_counter : Hist1D
Distribution of events over a classifier value
Returns
-------
tuple :
        two bin numbers representing the given percentile. Both bins are inclusive,
        i.e. the bin numbers can be used directly in SetRange.
Raises
------
ValueError :
The percentile specifies a range which is not found in the given event_counter histogram. It might be too
narrow.
"""
nbins = event_counter.GetXaxis().GetNbins()
ntotal_events = event_counter.Integral(1, nbins) # .Integral is a closed interval, as far as I can tell...
# fraction of events with greater or equal classifier values; hence decreasing values
frac_events_with_geq_classifier_value = [event_counter.Integral(binidx, nbins) / float(ntotal_events)
for binidx in range(1, nbins + 1)]
# small checks:
if frac_events_with_geq_classifier_value[0] != 1:
assert(0)
if len(frac_events_with_geq_classifier_value) != nbins:
assert(0)
# produce a list of bools, the first and last True are the first and last bin index
fraction_is_in_percentile_interval = lambda fraction: percentile_bin[0] >= fraction >= percentile_bin[1]
bin_is_in_percentile_interval = map(fraction_is_in_percentile_interval, frac_events_with_geq_classifier_value)
# get the indices of the elements that are True, sorry, this is a bit ugly
indices_of_bins_in_percentile_interval = [i for i, b in enumerate(bin_is_in_percentile_interval) if b]
# return the first and last binidx of the bins in the percentile interval; +1 for root binidx shit
try:
return (indices_of_bins_in_percentile_interval[0] + 1, indices_of_bins_in_percentile_interval[-1] + 1)
except IndexError:
# print "percentiles: "
# print frac_events_with_geq_classifier_value
raise ValueError("The given percentile interval did not match any bins in the given event_counter histogram")
def download_file(alien_path, local_path):
"""
Download a file from `alien_path` to `local`
Parameters
----------
alien_path, local_path : string
Full path to files
"""
if os.path.isfile(local_path):
raise ValueError("Local file exists")
try:
os.makedirs(os.path.dirname(local_path))
except OSError:
pass
alien_path = "alien:/" + alien_path
cp_cmd = ['alien_cp', '-v', '-s', alien_path, local_path]
p = subprocess.Popen(cp_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
p.wait()
if p.returncode != 0:
print "\n", p.stdout.read()
print("An error occued while downloading {0}; "
"The broken file was deleted.".format(local_path))
try:
os.remove(local_path)
except OSError:
pass
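# Illustrative call with a hypothetical alien path:
#   download_file('/alice/cern.ch/user/a/aliprod/LHC15x/AnalysisResults.root',
#                 'local_cache/LHC15x/AnalysisResults.root')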
def get_generator_name_from_train(alien_path):
"""
Extract the generator name for an `AnalysisResults.root` file on alien_path.
Parameters
----------
alien_path :
Alien path to `AnalysisResults.root`
Returns
-------
str :
Generator name as stated in the train's `env.sh` file
"""
if not alien_path.startswith("alien:"):
alien_path = "alien:/" + alien_path
path_to_env = os.path.join(os.path.split(alien_path)[0], "..", "env.sh")
cp_cmd = ['alien_cp', '-v', '-s', path_to_env, ".env.sh"]
print "copying with: %s"%cp_cmd
subprocess.check_call(cp_cmd)
with open(".env.sh") as f:
for line in f.readlines():
if "PERIOD_NAME" in line:
gen_name = re.match(".*'(.+)'", line).groups()[-1]
break
return gen_name
def get_generator_name_from_filename(fname):
"""
Deduce the generator name from the file name as asigned when the
file was downloaded. Reduce underscores with spaces.
"""
name = re.match(r'.*\d+_\d{8}-\d{4}-(.+)\.root$', fname).groups()[-1]
return name.replace("_", " ")
| bsd-3-clause | 5,444,532,354,651,315,000 | 7,359,597,864,709,887,000 | 32.758772 | 119 | 0.615175 | false |
lancezlin/ml_template_py | lib/python2.7/site-packages/zmq/tests/test_future.py | 9 | 7520 | # coding: utf-8
# Copyright (c) PyZMQ Developers
# Distributed under the terms of the Modified BSD License.
from datetime import timedelta
import os
import pytest
gen = pytest.importorskip('tornado.gen')
import zmq
from zmq.eventloop import future
from zmq.eventloop.ioloop import IOLoop
from zmq.utils.strtypes import u
from zmq.tests import BaseZMQTestCase
class TestFutureSocket(BaseZMQTestCase):
Context = future.Context
def setUp(self):
self.loop = IOLoop()
self.loop.make_current()
super(TestFutureSocket, self).setUp()
def tearDown(self):
super(TestFutureSocket, self).tearDown()
self.loop.close(all_fds=True)
def test_socket_class(self):
s = self.context.socket(zmq.PUSH)
assert isinstance(s, future.Socket)
s.close()
def test_recv_multipart(self):
@gen.coroutine
def test():
a, b = self.create_bound_pair(zmq.PUSH, zmq.PULL)
f = b.recv_multipart()
assert not f.done()
yield a.send(b'hi')
recvd = yield f
self.assertEqual(recvd, [b'hi'])
self.loop.run_sync(test)
def test_recv(self):
@gen.coroutine
def test():
a, b = self.create_bound_pair(zmq.PUSH, zmq.PULL)
f1 = b.recv()
f2 = b.recv()
assert not f1.done()
assert not f2.done()
yield a.send_multipart([b'hi', b'there'])
recvd = yield f2
assert f1.done()
self.assertEqual(f1.result(), b'hi')
self.assertEqual(recvd, b'there')
self.loop.run_sync(test)
def test_recv_cancel(self):
@gen.coroutine
def test():
a, b = self.create_bound_pair(zmq.PUSH, zmq.PULL)
f1 = b.recv()
f2 = b.recv_multipart()
assert f1.cancel()
assert f1.done()
assert not f2.done()
yield a.send_multipart([b'hi', b'there'])
recvd = yield f2
assert f1.cancelled()
assert f2.done()
self.assertEqual(recvd, [b'hi', b'there'])
self.loop.run_sync(test)
@pytest.mark.skipif(not hasattr(zmq, 'RCVTIMEO'), reason="requires RCVTIMEO")
def test_recv_timeout(self):
@gen.coroutine
def test():
a, b = self.create_bound_pair(zmq.PUSH, zmq.PULL)
b.rcvtimeo = 100
f1 = b.recv()
b.rcvtimeo = 1000
f2 = b.recv_multipart()
with pytest.raises(zmq.Again):
yield f1
yield a.send_multipart([b'hi', b'there'])
recvd = yield f2
assert f2.done()
self.assertEqual(recvd, [b'hi', b'there'])
self.loop.run_sync(test)
@pytest.mark.skipif(not hasattr(zmq, 'SNDTIMEO'), reason="requires SNDTIMEO")
def test_send_timeout(self):
@gen.coroutine
def test():
s = self.socket(zmq.PUSH)
s.sndtimeo = 100
with pytest.raises(zmq.Again):
yield s.send(b'not going anywhere')
self.loop.run_sync(test)
@pytest.mark.now
def test_send_noblock(self):
@gen.coroutine
def test():
s = self.socket(zmq.PUSH)
with pytest.raises(zmq.Again):
yield s.send(b'not going anywhere', flags=zmq.NOBLOCK)
self.loop.run_sync(test)
@pytest.mark.now
def test_send_multipart_noblock(self):
@gen.coroutine
def test():
s = self.socket(zmq.PUSH)
with pytest.raises(zmq.Again):
yield s.send_multipart([b'not going anywhere'], flags=zmq.NOBLOCK)
self.loop.run_sync(test)
def test_recv_string(self):
@gen.coroutine
def test():
a, b = self.create_bound_pair(zmq.PUSH, zmq.PULL)
f = b.recv_string()
assert not f.done()
msg = u('πøøπ')
yield a.send_string(msg)
recvd = yield f
assert f.done()
self.assertEqual(f.result(), msg)
self.assertEqual(recvd, msg)
self.loop.run_sync(test)
def test_recv_json(self):
@gen.coroutine
def test():
a, b = self.create_bound_pair(zmq.PUSH, zmq.PULL)
f = b.recv_json()
assert not f.done()
obj = dict(a=5)
yield a.send_json(obj)
recvd = yield f
assert f.done()
self.assertEqual(f.result(), obj)
self.assertEqual(recvd, obj)
self.loop.run_sync(test)
def test_recv_json_cancelled(self):
@gen.coroutine
def test():
a, b = self.create_bound_pair(zmq.PUSH, zmq.PULL)
f = b.recv_json()
assert not f.done()
f.cancel()
# cycle eventloop to allow cancel events to fire
yield gen.sleep(0)
obj = dict(a=5)
yield a.send_json(obj)
with pytest.raises(future.CancelledError):
recvd = yield f
assert f.done()
# give it a chance to incorrectly consume the event
events = yield b.poll(timeout=5)
assert events
yield gen.sleep(0)
# make sure cancelled recv didn't eat up event
recvd = yield gen.with_timeout(timedelta(seconds=5), b.recv_json())
assert recvd == obj
self.loop.run_sync(test)
def test_recv_pyobj(self):
@gen.coroutine
def test():
a, b = self.create_bound_pair(zmq.PUSH, zmq.PULL)
f = b.recv_pyobj()
assert not f.done()
obj = dict(a=5)
yield a.send_pyobj(obj)
recvd = yield f
assert f.done()
self.assertEqual(f.result(), obj)
self.assertEqual(recvd, obj)
self.loop.run_sync(test)
def test_poll(self):
@gen.coroutine
def test():
a, b = self.create_bound_pair(zmq.PUSH, zmq.PULL)
f = b.poll(timeout=0)
self.assertEqual(f.result(), 0)
f = b.poll(timeout=1)
assert not f.done()
evt = yield f
self.assertEqual(evt, 0)
f = b.poll(timeout=1000)
assert not f.done()
yield a.send_multipart([b'hi', b'there'])
evt = yield f
self.assertEqual(evt, zmq.POLLIN)
recvd = yield b.recv_multipart()
self.assertEqual(recvd, [b'hi', b'there'])
self.loop.run_sync(test)
def test_poll_raw(self):
@gen.coroutine
def test():
p = future.Poller()
# make a pipe
r, w = os.pipe()
r = os.fdopen(r, 'rb')
w = os.fdopen(w, 'wb')
# POLLOUT
p.register(r, zmq.POLLIN)
p.register(w, zmq.POLLOUT)
evts = yield p.poll(timeout=1)
evts = dict(evts)
assert r.fileno() not in evts
assert w.fileno() in evts
assert evts[w.fileno()] == zmq.POLLOUT
# POLLIN
p.unregister(w)
w.write(b'x')
w.flush()
evts = yield p.poll(timeout=1000)
evts = dict(evts)
assert r.fileno() in evts
assert evts[r.fileno()] == zmq.POLLIN
assert r.read(1) == b'x'
r.close()
w.close()
self.loop.run_sync(test)
| mit | 96,589,546,981,146,770 | 5,159,889,134,826,269,000 | 30.71308 | 82 | 0.521155 | false |
jiezhu2007/scrapy | tests/test_utils_deprecate.py | 140 | 10526 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import inspect
import unittest
import warnings
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.utils.deprecate import create_deprecated_class, update_classpath
from tests import mock
class MyWarning(UserWarning):
pass
class SomeBaseClass(object):
pass
class NewName(SomeBaseClass):
pass
class WarnWhenSubclassedTest(unittest.TestCase):
def _mywarnings(self, w, category=MyWarning):
return [x for x in w if x.category is MyWarning]
def test_no_warning_on_definition(self):
with warnings.catch_warnings(record=True) as w:
Deprecated = create_deprecated_class('Deprecated', NewName)
w = self._mywarnings(w)
self.assertEqual(w, [])
def test_subclassing_warning_message(self):
Deprecated = create_deprecated_class('Deprecated', NewName,
warn_category=MyWarning)
with warnings.catch_warnings(record=True) as w:
class UserClass(Deprecated):
pass
w = self._mywarnings(w)
self.assertEqual(len(w), 1)
self.assertEqual(
str(w[0].message),
"tests.test_utils_deprecate.UserClass inherits from "
"deprecated class tests.test_utils_deprecate.Deprecated, "
"please inherit from tests.test_utils_deprecate.NewName."
" (warning only on first subclass, there may be others)"
)
self.assertEqual(w[0].lineno, inspect.getsourcelines(UserClass)[1])
def test_custom_class_paths(self):
Deprecated = create_deprecated_class('Deprecated', NewName,
new_class_path='foo.NewClass',
old_class_path='bar.OldClass',
warn_category=MyWarning)
with warnings.catch_warnings(record=True) as w:
class UserClass(Deprecated):
pass
_ = Deprecated()
w = self._mywarnings(w)
self.assertEqual(len(w), 2)
self.assertIn('foo.NewClass', str(w[0].message))
self.assertIn('bar.OldClass', str(w[0].message))
self.assertIn('foo.NewClass', str(w[1].message))
self.assertIn('bar.OldClass', str(w[1].message))
def test_subclassing_warns_only_on_direct_childs(self):
Deprecated = create_deprecated_class('Deprecated', NewName,
warn_once=False,
warn_category=MyWarning)
with warnings.catch_warnings(record=True) as w:
class UserClass(Deprecated):
pass
class NoWarnOnMe(UserClass):
pass
w = self._mywarnings(w)
self.assertEqual(len(w), 1)
self.assertIn('UserClass', str(w[0].message))
def test_subclassing_warns_once_by_default(self):
Deprecated = create_deprecated_class('Deprecated', NewName,
warn_category=MyWarning)
with warnings.catch_warnings(record=True) as w:
class UserClass(Deprecated):
pass
class FooClass(Deprecated):
pass
class BarClass(Deprecated):
pass
w = self._mywarnings(w)
self.assertEqual(len(w), 1)
self.assertIn('UserClass', str(w[0].message))
def test_warning_on_instance(self):
Deprecated = create_deprecated_class('Deprecated', NewName,
warn_category=MyWarning)
# ignore subclassing warnings
with warnings.catch_warnings():
warnings.simplefilter('ignore', ScrapyDeprecationWarning)
class UserClass(Deprecated):
pass
with warnings.catch_warnings(record=True) as w:
_, lineno = Deprecated(), inspect.getlineno(inspect.currentframe())
_ = UserClass() # subclass instances don't warn
w = self._mywarnings(w)
self.assertEqual(len(w), 1)
self.assertEqual(
str(w[0].message),
"tests.test_utils_deprecate.Deprecated is deprecated, "
"instantiate tests.test_utils_deprecate.NewName instead."
)
self.assertEqual(w[0].lineno, lineno)
def test_warning_auto_message(self):
with warnings.catch_warnings(record=True) as w:
Deprecated = create_deprecated_class('Deprecated', NewName)
class UserClass2(Deprecated):
pass
msg = str(w[0].message)
self.assertIn("tests.test_utils_deprecate.NewName", msg)
self.assertIn("tests.test_utils_deprecate.Deprecated", msg)
def test_issubclass(self):
with warnings.catch_warnings():
warnings.simplefilter('ignore', ScrapyDeprecationWarning)
DeprecatedName = create_deprecated_class('DeprecatedName', NewName)
class UpdatedUserClass1(NewName):
pass
class UpdatedUserClass1a(NewName):
pass
class OutdatedUserClass1(DeprecatedName):
pass
class OutdatedUserClass1a(DeprecatedName):
pass
class UnrelatedClass(object):
pass
class OldStyleClass:
pass
assert issubclass(UpdatedUserClass1, NewName)
assert issubclass(UpdatedUserClass1a, NewName)
assert issubclass(UpdatedUserClass1, DeprecatedName)
assert issubclass(UpdatedUserClass1a, DeprecatedName)
assert issubclass(OutdatedUserClass1, DeprecatedName)
assert not issubclass(UnrelatedClass, DeprecatedName)
assert not issubclass(OldStyleClass, DeprecatedName)
assert not issubclass(OutdatedUserClass1, OutdatedUserClass1a)
assert not issubclass(OutdatedUserClass1a, OutdatedUserClass1)
self.assertRaises(TypeError, issubclass, object(), DeprecatedName)
def test_isinstance(self):
with warnings.catch_warnings():
warnings.simplefilter('ignore', ScrapyDeprecationWarning)
DeprecatedName = create_deprecated_class('DeprecatedName', NewName)
class UpdatedUserClass2(NewName):
pass
class UpdatedUserClass2a(NewName):
pass
class OutdatedUserClass2(DeprecatedName):
pass
class OutdatedUserClass2a(DeprecatedName):
pass
class UnrelatedClass(object):
pass
class OldStyleClass:
pass
assert isinstance(UpdatedUserClass2(), NewName)
assert isinstance(UpdatedUserClass2a(), NewName)
assert isinstance(UpdatedUserClass2(), DeprecatedName)
assert isinstance(UpdatedUserClass2a(), DeprecatedName)
assert isinstance(OutdatedUserClass2(), DeprecatedName)
assert isinstance(OutdatedUserClass2a(), DeprecatedName)
assert not isinstance(OutdatedUserClass2a(), OutdatedUserClass2)
assert not isinstance(OutdatedUserClass2(), OutdatedUserClass2a)
assert not isinstance(UnrelatedClass(), DeprecatedName)
assert not isinstance(OldStyleClass(), DeprecatedName)
def test_clsdict(self):
with warnings.catch_warnings():
warnings.simplefilter('ignore', ScrapyDeprecationWarning)
Deprecated = create_deprecated_class('Deprecated', NewName, {'foo': 'bar'})
self.assertEqual(Deprecated.foo, 'bar')
def test_deprecate_a_class_with_custom_metaclass(self):
Meta1 = type('Meta1', (type,), {})
New = Meta1('New', (), {})
Deprecated = create_deprecated_class('Deprecated', New)
def test_deprecate_subclass_of_deprecated_class(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
Deprecated = create_deprecated_class('Deprecated', NewName,
warn_category=MyWarning)
AlsoDeprecated = create_deprecated_class('AlsoDeprecated', Deprecated,
new_class_path='foo.Bar',
warn_category=MyWarning)
w = self._mywarnings(w)
self.assertEqual(len(w), 0, str(map(str, w)))
with warnings.catch_warnings(record=True) as w:
AlsoDeprecated()
class UserClass(AlsoDeprecated):
pass
w = self._mywarnings(w)
self.assertEqual(len(w), 2)
self.assertIn('AlsoDeprecated', str(w[0].message))
self.assertIn('foo.Bar', str(w[0].message))
self.assertIn('AlsoDeprecated', str(w[1].message))
self.assertIn('foo.Bar', str(w[1].message))
def test_inspect_stack(self):
with mock.patch('inspect.stack', side_effect=IndexError):
with warnings.catch_warnings(record=True) as w:
DeprecatedName = create_deprecated_class('DeprecatedName', NewName)
class SubClass(DeprecatedName):
pass
self.assertIn("Error detecting parent module", str(w[0].message))
@mock.patch('scrapy.utils.deprecate.DEPRECATION_RULES',
[('scrapy.contrib.pipeline.', 'scrapy.pipelines.'),
('scrapy.contrib.', 'scrapy.extensions.')])
class UpdateClassPathTest(unittest.TestCase):
def test_old_path_gets_fixed(self):
with warnings.catch_warnings(record=True) as w:
output = update_classpath('scrapy.contrib.debug.Debug')
self.assertEqual(output, 'scrapy.extensions.debug.Debug')
self.assertEqual(len(w), 1)
self.assertIn("scrapy.contrib.debug.Debug", str(w[0].message))
self.assertIn("scrapy.extensions.debug.Debug", str(w[0].message))
def test_sorted_replacement(self):
with warnings.catch_warnings():
warnings.simplefilter('ignore', ScrapyDeprecationWarning)
output = update_classpath('scrapy.contrib.pipeline.Pipeline')
self.assertEqual(output, 'scrapy.pipelines.Pipeline')
def test_unmatched_path_stays_the_same(self):
with warnings.catch_warnings(record=True) as w:
output = update_classpath('scrapy.unmatched.Path')
self.assertEqual(output, 'scrapy.unmatched.Path')
self.assertEqual(len(w), 0)
| bsd-3-clause | 7,131,925,777,426,815,000 | 5,063,428,349,151,257,000 | 36.459075 | 87 | 0.610583 | false |
nguyenfilip/subscription-manager | src/subscription_manager/isodate.py | 3 | 1113 | #
# find a reasonable iso8601 date parser
#
# Copyright (c) 2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import dateutil.parser
import logging
log = logging.getLogger('rhsm-app.' + __name__)
def _parse_date_dateutil(date):
# see comment for _parse_date_pyxml
try:
dt = dateutil.parser.parse(date)
except ValueError:
log.warning("Date overflow: %s, using 9999-09-06 instead." % date)
return dateutil.parser.parse("9999-09-06T00:00:00.000+0000")
return dt
parse_date = _parse_date_dateutil
parse_date_impl_name = 'dateutil'
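# Usage sketch (illustrative values): parse_date returns an aware datetime,
# and dates outside dateutil's supported range fall back to the sentinel:
#   parse_date("2013-07-15T12:00:00+0000")   # -> 2013-07-15 12:00 UTC
#   parse_date("10000-01-01T00:00:00+0000")  # -> 9999-09-06 (overflow guard)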
| gpl-2.0 | -6,607,269,243,542,066,000 | -1,156,374,213,237,345,300 | 29.916667 | 74 | 0.725067 | false |
Crystalnix/BitPop | chrome/test/functional/chromeos_browser.py | 76 | 1981 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import pyauto_functional # pyauto_functional must come before pyauto.
import pyauto
import test_utils
class ChromeosBrowserTest(pyauto.PyUITest):
def testCannotCloseLastIncognito(self):
"""Verify that last incognito window cannot be closed if it's the
last window"""
self.RunCommand(pyauto.IDC_NEW_INCOGNITO_WINDOW)
self.assertTrue(self.GetBrowserInfo()['windows'][1]['incognito'],
msg='Incognito window is not displayed')
self.CloseBrowserWindow(0)
info = self.GetBrowserInfo()['windows']
self.assertEqual(1, len(info))
url = info[0]['tabs'][0]['url']
self.assertEqual('chrome://newtab/', url,
msg='Unexpected URL: %s' % url)
self.assertTrue(info[0]['incognito'],
msg='Incognito window is not displayed.')
def testCrashBrowser(self):
"""Verify that after broswer crash is recovered, user can still navigate
to other URL."""
test_utils.CrashBrowser(self)
self.RestartBrowser(clear_profile=False)
url = self.GetHttpURLForDataPath('english_page.html')
self.NavigateToURL(url)
self.assertEqual('This page is in English', self.GetActiveTabTitle())
def testFullScreen(self):
"""Verify that a browser window can enter and exit full screen mode."""
self.ApplyAccelerator(pyauto.IDC_FULLSCREEN)
self.assertTrue(self.WaitUntil(lambda:
self.GetBrowserInfo()['windows'][0]['fullscreen']),
msg='Full Screen is not displayed.')
self.ApplyAccelerator(pyauto.IDC_FULLSCREEN)
self.assertTrue(self.WaitUntil(lambda: not
self.GetBrowserInfo()['windows'][0]['fullscreen']),
msg='Normal screen is not displayed.')
if __name__ == '__main__':
pyauto_functional.Main()
| bsd-3-clause | -7,084,235,994,562,296,000 | 1,863,167,449,012,516,400 | 37.096154 | 76 | 0.671883 | false |
sebmarchand/syzygy | third_party/numpy/files/numpy/core/tests/test_arrayprint.py | 22 | 2816 | import numpy as np
from numpy.testing import *
class TestArrayRepr(object):
def test_nan_inf(self):
x = np.array([np.nan, np.inf])
assert_equal(repr(x), 'array([ nan, inf])')
class TestComplexArray(TestCase):
def test_str(self):
rvals = [0, 1, -1, np.inf, -np.inf, np.nan]
cvals = [complex(rp, ip) for rp in rvals for ip in rvals]
dtypes = [np.complex64, np.cdouble, np.clongdouble]
actual = [str(np.array([c], dt)) for c in cvals for dt in dtypes]
wanted = [
'[ 0.+0.j]', '[ 0.+0.j]', '[ 0.0+0.0j]',
'[ 0.+1.j]', '[ 0.+1.j]', '[ 0.0+1.0j]',
'[ 0.-1.j]', '[ 0.-1.j]', '[ 0.0-1.0j]',
'[ 0.+infj]', '[ 0.+infj]', '[ 0.0+infj]',
'[ 0.-infj]', '[ 0.-infj]', '[ 0.0-infj]',
'[ 0.+nanj]', '[ 0.+nanj]', '[ 0.0+nanj]',
'[ 1.+0.j]', '[ 1.+0.j]', '[ 1.0+0.0j]',
'[ 1.+1.j]', '[ 1.+1.j]', '[ 1.0+1.0j]',
'[ 1.-1.j]', '[ 1.-1.j]', '[ 1.0-1.0j]',
'[ 1.+infj]', '[ 1.+infj]', '[ 1.0+infj]',
'[ 1.-infj]', '[ 1.-infj]', '[ 1.0-infj]',
'[ 1.+nanj]', '[ 1.+nanj]', '[ 1.0+nanj]',
'[-1.+0.j]', '[-1.+0.j]', '[-1.0+0.0j]',
'[-1.+1.j]', '[-1.+1.j]', '[-1.0+1.0j]',
'[-1.-1.j]', '[-1.-1.j]', '[-1.0-1.0j]',
'[-1.+infj]', '[-1.+infj]', '[-1.0+infj]',
'[-1.-infj]', '[-1.-infj]', '[-1.0-infj]',
'[-1.+nanj]', '[-1.+nanj]', '[-1.0+nanj]',
'[ inf+0.j]', '[ inf+0.j]', '[ inf+0.0j]',
'[ inf+1.j]', '[ inf+1.j]', '[ inf+1.0j]',
'[ inf-1.j]', '[ inf-1.j]', '[ inf-1.0j]',
'[ inf+infj]', '[ inf+infj]', '[ inf+infj]',
'[ inf-infj]', '[ inf-infj]', '[ inf-infj]',
'[ inf+nanj]', '[ inf+nanj]', '[ inf+nanj]',
'[-inf+0.j]', '[-inf+0.j]', '[-inf+0.0j]',
'[-inf+1.j]', '[-inf+1.j]', '[-inf+1.0j]',
'[-inf-1.j]', '[-inf-1.j]', '[-inf-1.0j]',
'[-inf+infj]', '[-inf+infj]', '[-inf+infj]',
'[-inf-infj]', '[-inf-infj]', '[-inf-infj]',
'[-inf+nanj]', '[-inf+nanj]', '[-inf+nanj]',
'[ nan+0.j]', '[ nan+0.j]', '[ nan+0.0j]',
'[ nan+1.j]', '[ nan+1.j]', '[ nan+1.0j]',
'[ nan-1.j]', '[ nan-1.j]', '[ nan-1.0j]',
'[ nan+infj]', '[ nan+infj]', '[ nan+infj]',
'[ nan-infj]', '[ nan-infj]', '[ nan-infj]',
'[ nan+nanj]', '[ nan+nanj]', '[ nan+nanj]']
for res, val in zip(actual, wanted):
assert_(res == val)
if __name__ == "__main__":
run_module_suite()
| apache-2.0 | 1,423,785,925,726,123,000 | 6,715,458,922,829,014,000 | 48.403509 | 73 | 0.339134 | false |
Bismarrck/tensorflow | tensorflow/python/data/experimental/kernel_tests/dense_to_sparse_batch_test.py | 17 | 4431 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `tf.data.experimental.dense_to_sparse_batch()."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.data.experimental.ops import batching
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import errors
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
@test_util.run_all_in_graph_and_eager_modes
class DenseToSparseBatchTest(test_base.DatasetTestBase):
def testDenseToSparseBatchDataset(self):
components = np.random.randint(12, size=(100,)).astype(np.int32)
dataset = dataset_ops.Dataset.from_tensor_slices(
components).map(lambda x: array_ops.fill([x], x)).apply(
batching.dense_to_sparse_batch(4, [12]))
get_next = self.getNext(dataset)
for start in range(0, len(components), 4):
results = self.evaluate(get_next())
self.assertAllEqual([[i, j]
for i, c in enumerate(components[start:start + 4])
for j in range(c)], results.indices)
self.assertAllEqual(
[c for c in components[start:start + 4] for _ in range(c)],
results.values)
self.assertAllEqual([min(4,
len(components) - start), 12],
results.dense_shape)
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
def testDenseToSparseBatchDatasetWithUnknownShape(self):
components = np.random.randint(5, size=(40,)).astype(np.int32)
dataset = dataset_ops.Dataset.from_tensor_slices(
components).map(lambda x: array_ops.fill([x, x], x)).apply(
batching.dense_to_sparse_batch(4, [5, None]))
get_next = self.getNext(dataset)
for start in range(0, len(components), 4):
results = self.evaluate(get_next())
self.assertAllEqual([[i, j, z]
for i, c in enumerate(components[start:start + 4])
for j in range(c)
for z in range(c)], results.indices)
self.assertAllEqual([
c for c in components[start:start + 4] for _ in range(c)
for _ in range(c)
], results.values)
self.assertAllEqual([
min(4,
len(components) - start), 5,
np.max(components[start:start + 4])
], results.dense_shape)
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
def testDenseToSparseBatchDatasetWithInvalidShape(self):
input_tensor = array_ops.constant([[1]])
with self.assertRaisesRegexp(ValueError, "Dimension -2 must be >= 0"):
dataset_ops.Dataset.from_tensors(input_tensor).apply(
batching.dense_to_sparse_batch(4, [-2]))
def testDenseToSparseBatchDatasetShapeErrors(self):
def dataset_fn(input_tensor):
return dataset_ops.Dataset.from_tensors(input_tensor).apply(
batching.dense_to_sparse_batch(4, [12]))
# Initialize with an input tensor of incompatible rank.
get_next = self.getNext(dataset_fn([[1]]))
with self.assertRaisesRegexp(errors.InvalidArgumentError,
"incompatible with the row shape"):
self.evaluate(get_next())
# Initialize with an input tensor that is larger than `row_shape`.
get_next = self.getNext(dataset_fn(np.int32(range(13))))
with self.assertRaisesRegexp(errors.DataLossError,
"larger than the row shape"):
self.evaluate(get_next())
if __name__ == "__main__":
test.main()
| apache-2.0 | 7,005,856,706,512,889,000 | 5,928,713,569,322,941,000 | 39.651376 | 80 | 0.647935 | false |
jrbourbeau/cr-composition | processing/legacy/anisotropy/random_trials/process_kstest.py | 2 | 7627 | #!/usr/bin/env python
import os
import argparse
import numpy as np
import pandas as pd
import pycondor
import comptools as comp
if __name__ == "__main__":
p = argparse.ArgumentParser(
        description='Builds and submits a condor Dagman for anisotropy KS-test trials')
p.add_argument('-c', '--config', dest='config',
default='IC86.2012',
choices=['IC79', 'IC86.2012', 'IC86.2013', 'IC86.2014', 'IC86.2015'],
help='Detector configuration')
p.add_argument('--low_energy', dest='low_energy',
default=False, action='store_true',
help='Only use events with energy < 10**6.75 GeV')
p.add_argument('--n_side', dest='n_side', type=int,
default=64,
                   help='Healpix nside parameter for the sky maps')
p.add_argument('--chunksize', dest='chunksize', type=int,
default=1000,
help='Number of lines used when reading in DataFrame')
p.add_argument('--n_batches', dest='n_batches', type=int,
default=50,
                   help='Number of batches running in parallel for each ks-test trial')
p.add_argument('--ks_trials', dest='ks_trials', type=int,
default=100,
help='Number of random maps to generate')
p.add_argument('--overwrite', dest='overwrite',
default=False, action='store_true',
help='Option to overwrite reference map file, '
                        'if it already exists')
p.add_argument('--test', dest='test',
default=False, action='store_true',
help='Option to run small test version')
args = p.parse_args()
if args.test:
args.ks_trials = 20
args.n_batches = 10000
args.chunksize = 100
# Define output directories
error = comp.paths.condor_data_dir + '/ks_test_{}/error'.format(args.config)
output = comp.paths.condor_data_dir + '/ks_test_{}/output'.format(args.config)
log = comp.paths.condor_scratch_dir + '/ks_test_{}/log'.format(args.config)
submit = comp.paths.condor_scratch_dir + '/ks_test_{}/submit'.format(args.config)
# Define path to executables
make_maps_ex = os.path.join(comp.paths.project_home,
'processing/anisotropy/ks_test_multipart',
'make_maps.py')
merge_maps_ex = os.path.join(comp.paths.project_home,
'processing/anisotropy/ks_test_multipart',
'merge_maps.py')
save_pvals_ex = os.path.join(comp.paths.project_home,
'processing/anisotropy/ks_test_multipart',
'save_pvals.py')
# Create Dagman instance
dag_name = 'anisotropy_kstest_{}'.format(args.config)
if args.test:
dag_name += '_test'
dagman = pycondor.Dagman(dag_name, submit=submit, verbose=1)
# Create Job for saving ks-test p-values for each trial
save_pvals_name = 'save_pvals_{}'.format(args.config)
if args.low_energy:
save_pvals_name += '_lowenergy'
save_pvals_job = pycondor.Job(save_pvals_name, save_pvals_ex,
error=error, output=output,
log=log, submit=submit,
verbose=1)
save_pvals_infiles_0 = []
save_pvals_infiles_1 = []
dagman.add_job(save_pvals_job)
outdir = os.path.join(comp.paths.comp_data_dir, args.config + '_data',
'anisotropy', 'random_splits')
if args.test:
outdir = os.path.join(outdir, 'test')
for trial_num in range(args.ks_trials):
# Create map_maps jobs for this ks_trial
make_maps_name = 'make_maps_{}_trial-{}'.format(args.config, trial_num)
if args.low_energy:
make_maps_name += '_lowenergy'
make_maps_job = pycondor.Job(make_maps_name, make_maps_ex,
error=error, output=output,
log=log, submit=submit,
verbose=1)
dagman.add_job(make_maps_job)
merge_maps_infiles_0 = []
merge_maps_infiles_1 = []
for batch_idx in range(args.n_batches):
if args.test and batch_idx > 2:
break
outfile_sample_1 = os.path.join(outdir,
'random_split_1_trial-{}_batch-{}.fits'.format(trial_num, batch_idx))
outfile_sample_0 = os.path.join(outdir,
'random_split_0_trial-{}_batch-{}.fits'.format(trial_num, batch_idx))
make_maps_arg_list = []
make_maps_arg_list.append('--config {}'.format(args.config))
make_maps_arg_list.append('--n_side {}'.format(args.n_side))
make_maps_arg_list.append('--chunksize {}'.format(args.chunksize))
make_maps_arg_list.append('--n_batches {}'.format(args.n_batches))
make_maps_arg_list.append('--batch_idx {}'.format(batch_idx))
make_maps_arg_list.append('--outfile_sample_0 {}'.format(outfile_sample_0))
make_maps_arg_list.append('--outfile_sample_1 {}'.format(outfile_sample_1))
make_maps_arg = ' '.join(make_maps_arg_list)
if args.low_energy:
make_maps_arg += ' --low_energy'
make_maps_job.add_arg(make_maps_arg)
# Add this outfile to the list of infiles for merge_maps_job
merge_maps_infiles_0.append(outfile_sample_0)
merge_maps_infiles_1.append(outfile_sample_1)
for sample_idx, input_file_list in enumerate([merge_maps_infiles_0,
merge_maps_infiles_1]):
merge_maps_name = 'merge_maps_{}_trial-{}_split-{}'.format(args.config, trial_num, sample_idx)
if args.low_energy:
merge_maps_name += '_lowenergy'
merge_maps_job = pycondor.Job(merge_maps_name, merge_maps_ex,
error=error, output=output,
log=log, submit=submit,
verbose=1)
# Ensure that make_maps_job completes before merge_maps_job begins
make_maps_job.add_child(merge_maps_job)
merge_maps_job.add_child(save_pvals_job)
dagman.add_job(merge_maps_job)
merge_infiles_str = ' '.join(input_file_list)
# Assemble merged output file path
merge_outfile = os.path.join(outdir, 'random_split_{}_trial-{}.fits'.format(sample_idx, trial_num))
merge_maps_arg = '--infiles {} --outfile {}'.format(merge_infiles_str, merge_outfile)
merge_maps_job.add_arg(merge_maps_arg)
if sample_idx == 0:
save_pvals_infiles_0.append(merge_outfile)
else:
save_pvals_infiles_1.append(merge_outfile)
save_pvals_infiles_0_str = ' '.join(save_pvals_infiles_0)
save_pvals_infiles_1_str = ' '.join(save_pvals_infiles_1)
if args.low_energy:
outfile_basename = 'ks_test_dataframe_lowenergy.hdf'
else:
outfile_basename = 'ks_test_dataframe.hdf'
outfile = os.path.join(outdir, outfile_basename)
save_pvals_arg = '--infiles_sample_0 {} --infiles_sample_1 {} ' \
'--outfile {}'.format(save_pvals_infiles_0_str, save_pvals_infiles_1_str, outfile)
save_pvals_job.add_arg(save_pvals_arg)
dagman.build_submit(fancyname=True)
| mit | 7,126,298,119,913,943,000 | -6,893,143,240,947,019,000 | 44.39881 | 111 | 0.554215 | false |
ar45/django | django/core/files/utils.py | 395 | 1338 | from django.utils import six
class FileProxyMixin(object):
"""
A mixin class used to forward file methods to an underlaying file
object. The internal file object has to be called "file"::
class FileProxy(FileProxyMixin):
def __init__(self, file):
self.file = file
"""
encoding = property(lambda self: self.file.encoding)
fileno = property(lambda self: self.file.fileno)
flush = property(lambda self: self.file.flush)
isatty = property(lambda self: self.file.isatty)
newlines = property(lambda self: self.file.newlines)
read = property(lambda self: self.file.read)
readinto = property(lambda self: self.file.readinto)
readline = property(lambda self: self.file.readline)
readlines = property(lambda self: self.file.readlines)
seek = property(lambda self: self.file.seek)
softspace = property(lambda self: self.file.softspace)
tell = property(lambda self: self.file.tell)
truncate = property(lambda self: self.file.truncate)
write = property(lambda self: self.file.write)
writelines = property(lambda self: self.file.writelines)
xreadlines = property(lambda self: self.file.xreadlines)
if six.PY3:
seekable = property(lambda self: self.file.seekable)
def __iter__(self):
return iter(self.file)
| bsd-3-clause | -2,964,862,908,089,697,000 | 5,553,599,083,219,569,000 | 38.352941 | 69 | 0.688341 | false |
chacoroot/planetary | addons/crm_partner_assign/__openerp__.py | 244 | 2369 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Partner Assignation & Geolocation',
'version': '1.0',
'category': 'Customer Relationship Management',
'description': """
This is the module used by OpenERP SA to redirect customers to its partners, based on geolocation.
======================================================================================================
This module lets you geolocate Leads, Opportunities and Partners based on their address.
Once the coordinates of the Lead/Opportunity is known, they can be automatically assigned
to an appropriate local partner, based on the distance and the weight that was assigned to the partner.
""",
'author': 'OpenERP SA',
'depends': ['base_geolocalize', 'crm', 'account', 'portal'],
'data': [
'security/ir.model.access.csv',
'res_partner_view.xml',
'wizard/crm_forward_to_partner_view.xml',
'wizard/crm_channel_interested_view.xml',
'crm_lead_view.xml',
'crm_partner_assign_data.xml',
'crm_portal_view.xml',
'portal_data.xml',
'report/crm_lead_report_view.xml',
'report/crm_partner_report_view.xml',
],
'demo': [
'res_partner_demo.xml',
'crm_lead_demo.xml'
],
'test': ['test/partner_assign.yml'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -3,872,076,340,245,024,300 | 5,958,490,528,430,901,000 | 39.152542 | 103 | 0.598565 | false |
tadebayo/myedge | myvenv/Lib/site-packages/django/dispatch/dispatcher.py | 55 | 11693 | import sys
import threading
import warnings
import weakref
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.inspect import func_accepts_kwargs
from django.utils.six.moves import range
if six.PY2:
from .weakref_backports import WeakMethod
else:
from weakref import WeakMethod
def _make_id(target):
if hasattr(target, '__func__'):
return (id(target.__self__), id(target.__func__))
return id(target)
NONE_ID = _make_id(None)
# A marker for caching
NO_RECEIVERS = object()
class Signal(object):
"""
Base class for all signals
Internal attributes:
receivers
{ receiverkey (id) : weakref(receiver) }
"""
def __init__(self, providing_args=None, use_caching=False):
"""
Create a new signal.
providing_args
A list of the arguments this signal can pass along in a send() call.
"""
self.receivers = []
if providing_args is None:
providing_args = []
self.providing_args = set(providing_args)
self.lock = threading.Lock()
self.use_caching = use_caching
# For convenience we create empty caches even if they are not used.
# A note about caching: if use_caching is defined, then for each
# distinct sender we cache the receivers that sender has in
# 'sender_receivers_cache'. The cache is cleaned when .connect() or
# .disconnect() is called and populated on send().
self.sender_receivers_cache = weakref.WeakKeyDictionary() if use_caching else {}
self._dead_receivers = False
def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
"""
Connect receiver to sender for signal.
Arguments:
receiver
A function or an instance method which is to receive signals.
Receivers must be hashable objects.
If weak is True, then receiver must be weak referenceable.
Receivers must be able to accept keyword arguments.
If a receiver is connected with a dispatch_uid argument, it
will not be added if another receiver was already connected
with that dispatch_uid.
sender
The sender to which the receiver should respond. Must either be
a Python object, or None to receive events from any sender.
weak
Whether to use weak references to the receiver. By default, the
module will attempt to use weak references to the receiver
objects. If this parameter is false, then strong references will
be used.
dispatch_uid
An identifier used to uniquely identify a particular instance of
a receiver. This will usually be a string, though it may be
anything hashable.
"""
from django.conf import settings
# If DEBUG is on, check that we got a good receiver
if settings.configured and settings.DEBUG:
assert callable(receiver), "Signal receivers must be callable."
# Check for **kwargs
if not func_accepts_kwargs(receiver):
raise ValueError("Signal receivers must accept keyword arguments (**kwargs).")
if dispatch_uid:
lookup_key = (dispatch_uid, _make_id(sender))
else:
lookup_key = (_make_id(receiver), _make_id(sender))
if weak:
ref = weakref.ref
receiver_object = receiver
# Check for bound methods
if hasattr(receiver, '__self__') and hasattr(receiver, '__func__'):
ref = WeakMethod
receiver_object = receiver.__self__
if six.PY3:
receiver = ref(receiver)
weakref.finalize(receiver_object, self._remove_receiver)
else:
receiver = ref(receiver, self._remove_receiver)
with self.lock:
self._clear_dead_receivers()
for r_key, _ in self.receivers:
if r_key == lookup_key:
break
else:
self.receivers.append((lookup_key, receiver))
self.sender_receivers_cache.clear()
def disconnect(self, receiver=None, sender=None, weak=None, dispatch_uid=None):
"""
Disconnect receiver from sender for signal.
If weak references are used, disconnect need not be called. The receiver
will be remove from dispatch automatically.
Arguments:
receiver
The registered receiver to disconnect. May be none if
dispatch_uid is specified.
sender
The registered sender to disconnect
dispatch_uid
the unique identifier of the receiver to disconnect
"""
if weak is not None:
warnings.warn("Passing `weak` to disconnect has no effect.", RemovedInDjango20Warning, stacklevel=2)
if dispatch_uid:
lookup_key = (dispatch_uid, _make_id(sender))
else:
lookup_key = (_make_id(receiver), _make_id(sender))
disconnected = False
with self.lock:
self._clear_dead_receivers()
for index in range(len(self.receivers)):
(r_key, _) = self.receivers[index]
if r_key == lookup_key:
disconnected = True
del self.receivers[index]
break
self.sender_receivers_cache.clear()
return disconnected
def has_listeners(self, sender=None):
return bool(self._live_receivers(sender))
def send(self, sender, **named):
"""
Send signal from sender to all connected receivers.
If any receiver raises an error, the error propagates back through send,
terminating the dispatch loop. So it's possible that all receivers
won't be called if an error is raised.
Arguments:
sender
The sender of the signal. Either a specific object or None.
named
Named arguments which will be passed to receivers.
Returns a list of tuple pairs [(receiver, response), ... ].
"""
responses = []
if not self.receivers or self.sender_receivers_cache.get(sender) is NO_RECEIVERS:
return responses
for receiver in self._live_receivers(sender):
response = receiver(signal=self, sender=sender, **named)
responses.append((receiver, response))
return responses
def send_robust(self, sender, **named):
"""
Send signal from sender to all connected receivers catching errors.
Arguments:
sender
The sender of the signal. Can be any python object (normally one
registered with a connect if you actually want something to
occur).
named
Named arguments which will be passed to receivers. These
arguments must be a subset of the argument names defined in
providing_args.
Return a list of tuple pairs [(receiver, response), ... ]. May raise
DispatcherKeyError.
If any receiver raises an error (specifically any subclass of
Exception), the error instance is returned as the result for that
receiver. The traceback is always attached to the error at
``__traceback__``.
"""
responses = []
if not self.receivers or self.sender_receivers_cache.get(sender) is NO_RECEIVERS:
return responses
# Call each receiver with whatever arguments it can accept.
# Return a list of tuple pairs [(receiver, response), ... ].
for receiver in self._live_receivers(sender):
try:
response = receiver(signal=self, sender=sender, **named)
except Exception as err:
if not hasattr(err, '__traceback__'):
err.__traceback__ = sys.exc_info()[2]
responses.append((receiver, err))
else:
responses.append((receiver, response))
return responses
def _clear_dead_receivers(self):
# Note: caller is assumed to hold self.lock.
if self._dead_receivers:
self._dead_receivers = False
new_receivers = []
for r in self.receivers:
if isinstance(r[1], weakref.ReferenceType) and r[1]() is None:
continue
new_receivers.append(r)
self.receivers = new_receivers
def _live_receivers(self, sender):
"""
Filter sequence of receivers to get resolved, live receivers.
This checks for weak references and resolves them, then returning only
live receivers.
"""
receivers = None
if self.use_caching and not self._dead_receivers:
receivers = self.sender_receivers_cache.get(sender)
            # We could end up here with NO_RECEIVERS even if we do check this case in
            # .send() prior to calling _live_receivers() due to a concurrent .send() call.
if receivers is NO_RECEIVERS:
return []
if receivers is None:
with self.lock:
self._clear_dead_receivers()
senderkey = _make_id(sender)
receivers = []
for (receiverkey, r_senderkey), receiver in self.receivers:
if r_senderkey == NONE_ID or r_senderkey == senderkey:
receivers.append(receiver)
if self.use_caching:
if not receivers:
self.sender_receivers_cache[sender] = NO_RECEIVERS
else:
# Note, we must cache the weakref versions.
self.sender_receivers_cache[sender] = receivers
non_weak_receivers = []
for receiver in receivers:
if isinstance(receiver, weakref.ReferenceType):
# Dereference the weak reference.
receiver = receiver()
if receiver is not None:
non_weak_receivers.append(receiver)
else:
non_weak_receivers.append(receiver)
return non_weak_receivers
def _remove_receiver(self, receiver=None):
# Mark that the self.receivers list has dead weakrefs. If so, we will
# clean those up in connect, disconnect and _live_receivers while
# holding self.lock. Note that doing the cleanup here isn't a good
# idea, _remove_receiver() will be called as side effect of garbage
# collection, and so the call can happen while we are already holding
# self.lock.
self._dead_receivers = True
def receiver(signal, **kwargs):
"""
A decorator for connecting receivers to signals. Used by passing in the
signal (or list of signals) and keyword arguments to connect::
@receiver(post_save, sender=MyModel)
def signal_receiver(sender, **kwargs):
...
@receiver([post_save, post_delete], sender=MyModel)
def signals_receiver(sender, **kwargs):
...
"""
def _decorator(func):
if isinstance(signal, (list, tuple)):
for s in signal:
s.connect(func, **kwargs)
else:
signal.connect(func, **kwargs)
return func
return _decorator
| mit | 6,806,261,943,953,047,000 | 5,133,982,569,445,866,000 | 36.003165 | 112 | 0.584196 | false |
hitsthings/node-gyp | gyp/test/mac/gyptest-rebuild.py | 299 | 1260 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that app bundles are rebuilt correctly.
"""
import TestGyp
import sys
if sys.platform == 'darwin':
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
CHDIR = 'rebuild'
test.run_gyp('test.gyp', chdir=CHDIR)
test.build('test.gyp', 'test_app', chdir=CHDIR)
# Touch a source file, rebuild, and check that the app target is up-to-date.
test.touch('rebuild/main.c')
test.build('test.gyp', 'test_app', chdir=CHDIR)
test.up_to_date('test.gyp', 'test_app', chdir=CHDIR)
# Xcode runs postbuilds on every build, so targets with postbuilds are
# never marked as up_to_date.
if test.format != 'xcode':
# Same for a framework bundle.
test.build('test.gyp', 'test_framework_postbuilds', chdir=CHDIR)
test.up_to_date('test.gyp', 'test_framework_postbuilds', chdir=CHDIR)
# Test that an app bundle with a postbuild that touches the app binary needs
# to be built only once.
test.build('test.gyp', 'test_app_postbuilds', chdir=CHDIR)
test.up_to_date('test.gyp', 'test_app_postbuilds', chdir=CHDIR)
test.pass_test()
| mit | -5,405,751,075,806,026,000 | 2,604,627,741,100,338,000 | 29.731707 | 80 | 0.690476 | false |
tobes/raspberry-DMX | dmx2.py | 1 | 2777 | # -*- coding: utf-8 -*-
# http://www.jonshouse.co.uk/rpidmx512.cgi
from twisted.internet import protocol, reactor, endpoints
from pigpio import pi, pulse as pig_pulse, OUTPUT
PORT = 50007
# pins
PIN_DI = 18 # Driver Input
PIN_DE = 23 # Driver output enable (high to enable)
PIN_RE = 24 # Receive output enable (low to enable)
PIN_NULL = 0
DI = 1 << PIN_DI
# timings in µ seconds
BREAK = 88
MAB = 8
BIT = 4
MTBP = 50 # Time between packets
SLEEP_TIME = 5
pulses = []
pig = None
def high(duration=BIT):
return pig_pulse(0, DI, duration)
def low(duration=BIT):
return pig_pulse(DI, 0, duration)
def create_value(value):
# start (low for one bit)
# 8 data bits
# stop (high for two bits)
out = []
def write_pulse():
if bits:
if current:
out.append(high(BIT * bits))
else:
out.append(low(BIT * bits))
    value = value << 1 | 1536  # bit 0 becomes the low start bit; 1536 sets the two high stop bits
bits = 0
current = None
for bit in range(11):
bit_value = bool(1 << bit & value)
if bit_value == current:
bits += 1
continue
write_pulse()
current = bit_value
bits = 1
write_pulse()
return out
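# Worked example (sketch): create_value(0x01) frames the byte as, LSB first,
#   start(0), data bits 1,0,0,0,0,0,0,0, stop(1), stop(1)
# and collapses runs of equal bits into single pulses, yielding:
#   [low(BIT), high(BIT), low(7 * BIT), high(2 * BIT)]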
# precalculate pulses
pulse_values = [create_value(x) for x in range(256)]
def build_waveform(values):
# clear pulses
pulses = []
# Break (low)
pulses.append(low(BREAK))
# Mark after break (high)
pulses.append(high(MAB))
# Channel data
for value in values:
pulses += pulse_values[value]
# End of data (leave high)
pulses.append(high(MTBP))
return pulses
# set up gpio
if True:
pig = pi()
pig.set_mode(PIN_RE, OUTPUT)
pig.set_mode(PIN_DE, OUTPUT)
pig.set_mode(PIN_DI, OUTPUT)
pig.write(PIN_RE, 0) # disable Receive Enable
pig.write(PIN_DE, 1) # enable Driver Enable
pig.write(PIN_DI, 1) # high is the rest state
def send(values):
pig.wave_clear() # clear any existing waveforms
pig.wave_add_generic(build_waveform(values))
wave = pig.wave_create()
pig.wave_send_once(wave)
class Dmx(protocol.Protocol):
def connectionMade(self):
print "Client Connected Detected!"
# enable keepalive if supported
try:
self.transport.setTcpKeepAlive(1)
except AttributeError:
pass
def connectionLost(self, reason):
print "Client Connection Lost!"
def dataReceived(self, data):
data = [int(data[i:i + 2], 16) for i in range(0, len(data), 2)]
send(data)
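    # Wire format sketch (the client below is hypothetical): each TCP payload
    # is a hex string, two characters per DMX channel, e.g.
    #
    #   import socket
    #   s = socket.create_connection(('<pi-address>', 50007))
    #   s.send('ff8000')  # channels 1-3 -> 255, 128, 0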
class DmxFactory(protocol.Factory):
def buildProtocol(self, addr):
return Dmx()
if __name__ == '__main__':
endpoints.serverFromString(reactor, "tcp:%s" % PORT).listen(DmxFactory())
reactor.run()
| gpl-2.0 | -3,714,737,163,108,157,400 | 6,893,467,190,153,479,000 | 20.030303 | 77 | 0.602305 | false |
rest-of/the-deck | lambda/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.py | 619 | 2168 | from io import BytesIO
class CallbackFileWrapper(object):
"""
Small wrapper around a fp object which will tee everything read into a
buffer, and when that file is closed it will execute a callback with the
contents of that buffer.
All attributes are proxied to the underlying file object.
This class uses members with a double underscore (__) leading prefix so as
not to accidentally shadow an attribute.
"""
def __init__(self, fp, callback):
self.__buf = BytesIO()
self.__fp = fp
self.__callback = callback
def __getattr__(self, name):
        # The vagaries of garbage collection mean that self.__fp is
        # not always set. Using __getattribute__ with the private
        # name [0] lets us look up the attribute value and raise an
        # AttributeError when it doesn't exist. This stops things from
        # infinitely recursing into getattr in the case where
        # self.__fp hasn't been set.
#
# [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
fp = self.__getattribute__('_CallbackFileWrapper__fp')
return getattr(fp, name)
def __is_fp_closed(self):
try:
return self.__fp.fp is None
except AttributeError:
pass
try:
return self.__fp.closed
except AttributeError:
pass
# We just don't cache it then.
# TODO: Add some logging here...
return False
def read(self, amt=None):
data = self.__fp.read(amt)
self.__buf.write(data)
if self.__is_fp_closed():
if self.__callback:
self.__callback(self.__buf.getvalue())
# We assign this to None here, because otherwise we can get into
# really tricky problems where the CPython interpreter dead locks
# because the callback is holding a reference to something which
# has a __del__ method. Setting this to None breaks the cycle
# and allows the garbage collector to do it's thing normally.
self.__callback = None
return data
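# Usage sketch (hypothetical, mirroring how a caching layer tees a body):
# wrap a urllib3 response's raw stream so the complete payload can be cached
# once it is exhausted. Close detection relies on the wrapped object clearing
# its ``fp`` attribute (urllib3 behaviour); a plain file object will not fire
# the callback.
#
#   resp = pool.urlopen('GET', '/data', preload_content=False)
#   resp._fp = CallbackFileWrapper(resp._fp, cache_the_body)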
| mit | 1,791,290,241,385,313,000 | 4,138,019,252,109,874,700 | 33.412698 | 83 | 0.611162 | false |
paulfurley/festival-weather | festivalweather/parser.py | 1 | 3354 | from __future__ import unicode_literals
import datetime
import pytz
import lxml.etree
from .two_hourly_forecast import TwoHourlyForecast
"""
The Norwegian weather service API 1.9 returns a pretty cool data format, albeit
encoded in XML. It contains two types of forecasts:
1. point-in-time (from="2014-08-03T19:00:00Z" to="2014-08-03T19:00:00Z")
2. range (from="2014-08-03T18:00:00Z" to="2014-08-03T19:00:00Z"
The point-in-time forecasts it gives lots of data like this:
<temperature id="TTT" unit="celsius" value="15.5"/>
<windDirection id="dd" deg="217.1" name="SW"/>
<windSpeed id="ff" mps="4.2" beaufort="3" name="Lett bris"/>
<humidity value="76.0" unit="percent"/>
<pressure id="pr" unit="hPa" value="1010.7"/>
<cloudiness id="NN" percent="17.7"/>
<fog id="FOG" percent="0.0"/>
<lowClouds id="LOW" percent="17.7"/>
<mediumClouds id="MEDIUM" percent="0.0"/>
<highClouds id="HIGH" percent="0.0"/>
<dewpointTemperature id="TD" unit="celsius" value="11.4"/>
Whereas for ranges it just gives:
<precipitation unit="mm" value="0.0" minvalue="0.0" maxvalue="0.0"/>
<symbol id="LightCloud" number="2"/>
For your convenience, it seems, 1, 2, 3, 4 and 6 hour ranges are available.
"""
def get_two_hourly_forecasts(xml_f, utc_startfrom, timezone, count):
root = lxml.etree.fromstring(xml_f.read())
point_time_forecasts = parse_point_time_forecasts(root)
two_hour_range_forecasts = parse_two_hour_range_forecasts(root)
for hour_offset in range(0, count * 2, 2):
forecast_time = utc_startfrom + datetime.timedelta(hours=hour_offset)
temperature = parse_temperature(point_time_forecasts[forecast_time])
min_rain, max_rain = parse_precipitation(
two_hour_range_forecasts[forecast_time])
yield make_two_hourly_forecast(forecast_time, timezone, temperature,
min_rain, max_rain)
def parse_point_time_forecasts(root):
result = {}
xpath = '//weatherdata/product[@class="pointData"]/time[@from=@to]'
    for node in root.xpath(xpath):
result[parse_datetime(node.get('from'))] = node
return result
def parse_two_hour_range_forecasts(root):
result = {}
xpath = '//weatherdata/product[@class="pointData"]/time[@from]'
    for node in root.xpath(xpath):
from_ = parse_datetime(node.get('from'))
to = parse_datetime(node.get('to'))
if to - from_ == datetime.timedelta(hours=2):
result[from_] = node
return result
def parse_datetime(string):
return datetime.datetime.strptime(string, '%Y-%m-%dT%H:%M:%SZ').replace(
tzinfo=pytz.UTC)
def parse_temperature(time_node):
temp = time_node.xpath('./location/temperature[@unit="celsius"]/@value')[0]
return float(temp)
def parse_precipitation(time_node):
min_ = time_node.xpath('./location/precipitation[@unit="mm"]/@minvalue')[0]
max_ = time_node.xpath('./location/precipitation[@unit="mm"]/@maxvalue')[0]
return float(min_), float(max_)
def make_two_hourly_forecast(utc_time, timezone, temperature, min_rain,
max_rain):
return TwoHourlyForecast(
start_datetime=utc_time.astimezone(pytz.timezone(timezone)),
temperature=temperature,
min_rain=min_rain,
max_rain=max_rain)
| mit | 881,793,964,316,404,100 | 2,048,656,122,660,562,400 | 32.54 | 79 | 0.651163 | false |
dask-image/dask-ndfourier | dask_ndfourier/_compat.py | 1 | 1574 | # -*- coding: utf-8 -*-
"""
Content here is borrowed from our contributions to Dask.
"""
import numpy
import dask.array
def _fftfreq_block(i, n, d):
r = i.copy()
r[i >= (n + 1) // 2] -= n
r /= n * d
return r
def _fftfreq(n, d=1.0, chunks=None):
"""
Return the Discrete Fourier Transform sample frequencies.
The returned float array `f` contains the frequency bin centers in cycles
per unit of the sample spacing (with zero at the start). For instance, if
the sample spacing is in seconds, then the frequency unit is cycles/second.
Given a window length `n` and a sample spacing `d`::
f = [0, 1, ..., n/2-1, -n/2, ..., -1] / (d*n) if n is even
f = [0, 1, ..., (n-1)/2, -(n-1)/2, ..., -1] / (d*n) if n is odd
Parameters
----------
n : int
Window length.
d : scalar, optional
Sample spacing (inverse of the sampling rate). Defaults to 1.
Returns
-------
grid : dask array
Examples
--------
>>> signal = np.array([-2, 8, 6, 4, 1, 0, 3, 5], dtype=float)
>>> fourier = np.fft.fft(signal)
>>> n = signal.size
>>> timestep = 0.1
>>> freq = np.fft.fftfreq(n, d=timestep)
>>> freq
array([ 0. , 1.25, 2.5 , 3.75, -5. , -3.75, -2.5 , -1.25])
Notes
-----
Borrowed from my Dask Array contribution.
"""
n = int(n)
d = float(d)
r = dask.array.arange(n, dtype=float, chunks=chunks)
return r.map_blocks(_fftfreq_block, dtype=float, n=n, d=d)
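# Example (sketch): the dask version matches numpy.fft.fftfreq, chunked.
#   >>> _fftfreq(8, d=0.1, chunks=4).compute()
#   array([ 0.  ,  1.25,  2.5 ,  3.75, -5.  , -3.75, -2.5 , -1.25])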
_sinc = dask.array.ufunc.wrap_elemwise(numpy.sinc)
| bsd-3-clause | 3,698,980,897,883,937,300 | 781,959,842,557,330,800 | 22.848485 | 79 | 0.550191 | false |
nkgilley/home-assistant | homeassistant/components/anthemav/media_player.py | 21 | 5438 | """Support for Anthem Network Receivers and Processors."""
import logging
import anthemav
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
EVENT_HOMEASSISTANT_STOP,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
_LOGGER = logging.getLogger(__name__)
DOMAIN = "anthemav"
DEFAULT_PORT = 14999
SUPPORT_ANTHEMAV = (
SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_SELECT_SOURCE
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up our socket to the AVR."""
host = config[CONF_HOST]
port = config[CONF_PORT]
name = config.get(CONF_NAME)
device = None
_LOGGER.info("Provisioning Anthem AVR device at %s:%d", host, port)
@callback
def async_anthemav_update_callback(message):
"""Receive notification from transport that new data exists."""
_LOGGER.debug("Received update callback from AVR: %s", message)
async_dispatcher_send(hass, DOMAIN)
avr = await anthemav.Connection.create(
host=host, port=port, update_callback=async_anthemav_update_callback
)
device = AnthemAVR(avr, name)
_LOGGER.debug("dump_devicedata: %s", device.dump_avrdata)
_LOGGER.debug("dump_conndata: %s", avr.dump_conndata)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, device.avr.close)
async_add_entities([device])
class AnthemAVR(MediaPlayerEntity):
"""Entity reading values from Anthem AVR protocol."""
def __init__(self, avr, name):
"""Initialize entity with transport."""
super().__init__()
self.avr = avr
self._name = name
def _lookup(self, propname, dval=None):
return getattr(self.avr.protocol, propname, dval)
async def async_added_to_hass(self):
"""When entity is added to hass."""
self.async_on_remove(
async_dispatcher_connect(self.hass, DOMAIN, self.async_write_ha_state)
)
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_ANTHEMAV
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return name of device."""
return self._name or self._lookup("model")
@property
def state(self):
"""Return state of power on/off."""
pwrstate = self._lookup("power")
if pwrstate is True:
return STATE_ON
if pwrstate is False:
return STATE_OFF
return None
@property
def is_volume_muted(self):
"""Return boolean reflecting mute state on device."""
return self._lookup("mute", False)
@property
def volume_level(self):
"""Return volume level from 0 to 1."""
return self._lookup("volume_as_percentage", 0.0)
@property
def media_title(self):
"""Return current input name (closest we have to media title)."""
return self._lookup("input_name", "No Source")
@property
def app_name(self):
"""Return details about current video and audio stream."""
return (
f"{self._lookup('video_input_resolution_text', '')} "
f"{self._lookup('audio_input_name', '')}"
)
@property
def source(self):
"""Return currently selected input."""
return self._lookup("input_name", "Unknown")
@property
def source_list(self):
"""Return all active, configured inputs."""
return self._lookup("input_list", ["Unknown"])
async def async_select_source(self, source):
"""Change AVR to the designated source (by name)."""
self._update_avr("input_name", source)
async def async_turn_off(self):
"""Turn AVR power off."""
self._update_avr("power", False)
async def async_turn_on(self):
"""Turn AVR power on."""
self._update_avr("power", True)
async def async_set_volume_level(self, volume):
"""Set AVR volume (0 to 1)."""
self._update_avr("volume_as_percentage", volume)
async def async_mute_volume(self, mute):
"""Engage AVR mute."""
self._update_avr("mute", mute)
def _update_avr(self, propname, value):
"""Update a property in the AVR."""
_LOGGER.info("Sending command to AVR: set %s to %s", propname, str(value))
setattr(self.avr.protocol, propname, value)
@property
def dump_avrdata(self):
"""Return state of avr object for debugging forensics."""
attrs = vars(self)
items_string = ", ".join(f"{item}: {item}" for item in attrs.items())
return f"dump_avrdata: {items_string}"
| apache-2.0 | 7,877,408,644,370,110,000 | 4,689,272,760,940,079,000 | 27.772487 | 86 | 0.629459 | false |
katiecheng/Bombolone | env/lib/python2.7/site-packages/Crypto/Cipher/blockalgo.py | 133 | 12596 | # -*- coding: utf-8 -*-
#
# Cipher/blockalgo.py
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Module with definitions common to all block ciphers."""
import sys
if sys.version_info[0] == 2 and sys.version_info[1] == 1:
from Crypto.Util.py21compat import *
from Crypto.Util.py3compat import *
#: *Electronic Code Book (ECB)*.
#: This is the simplest encryption mode. Each of the plaintext blocks
#: is directly encrypted into a ciphertext block, independently of
#: any other block. This mode exposes frequency of symbols
#: in your plaintext. Other modes (e.g. *CBC*) should be used instead.
#:
#: See `NIST SP800-38A`_ , Section 6.1 .
#:
#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf
MODE_ECB = 1
#: *Cipher-Block Chaining (CBC)*. Each of the ciphertext blocks depends
#: on the current and all previous plaintext blocks. An Initialization Vector
#: (*IV*) is required.
#:
#: The *IV* is a data block to be transmitted to the receiver.
#: The *IV* can be made public, but it must be authenticated by the receiver and
#: it should be picked randomly.
#:
#: See `NIST SP800-38A`_ , Section 6.2 .
#:
#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf
MODE_CBC = 2
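# Hedged usage sketch (assumes the AES factory; `iv` must be one block long,
# i.e. 16 bytes for AES):
#
#     >>> from Crypto.Cipher import AES
#     >>> cipher = AES.new(key, AES.MODE_CBC, iv)
#     >>> ct = cipher.encrypt(plaintext)   # len(plaintext) % 16 == 0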
#: *Cipher FeedBack (CFB)*. This mode is similar to CBC, but it transforms
#: the underlying block cipher into a stream cipher. Plaintext and ciphertext
#: are processed in *segments* of **s** bits. The mode is therefore sometimes
#: labelled **s**-bit CFB. An Initialization Vector (*IV*) is required.
#:
#: When encrypting, each ciphertext segment contributes to the encryption of
#: the next plaintext segment.
#:
#: This *IV* is a data block to be transmitted to the receiver.
#: The *IV* can be made public, but it should be picked randomly.
#: Reusing the same *IV* for encryptions done with the same key leads to
#: catastrophic cryptographic failures.
#:
#: See `NIST SP800-38A`_ , Section 6.3 .
#:
#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf
MODE_CFB = 3
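# Hedged usage sketch (assumes the AES factory; `segment_size` defaults to 8
# bits in this package, so plaintext of any byte length is acceptable):
#
#     >>> from Crypto.Cipher import AES
#     >>> cipher = AES.new(key, AES.MODE_CFB, iv)
#     >>> ct = cipher.encrypt(plaintext)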
#: This mode should not be used.
MODE_PGP = 4
#: *Output FeedBack (OFB)*. This mode is very similar to CBC, but it
#: transforms the underlying block cipher into a stream cipher.
#: The keystream is the iterated block encryption of an Initialization Vector (*IV*).
#:
#: The *IV* is a data block to be transmitted to the receiver.
#: The *IV* can be made public, but it should be picked randomly.
#:
#: Reusing the same *IV* for encryptions done with the same key leads to
#: catastrophic cryptographic failures.
#:
#: See `NIST SP800-38A`_ , Section 6.4 .
#:
#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf
MODE_OFB = 5
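# Hedged usage sketch (assumes the AES factory; as noted in the encrypt()
# docstring below, this implementation requires plaintext length to be a
# multiple of the block size in OFB mode):
#
#     >>> from Crypto.Cipher import AES
#     >>> cipher = AES.new(key, AES.MODE_OFB, iv)
#     >>> ct = cipher.encrypt(plaintext)   # len(plaintext) % 16 == 0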
#: *CounTeR (CTR)*. This mode is very similar to ECB, in that
#: encryption of one block is done independently of all other blocks.
#: Unlike ECB, the block *position* contributes to the encryption and no
#: information leaks about symbol frequency.
#:
#: Each message block is associated to a *counter* which must be unique
#: across all messages that get encrypted with the same key (not just within
#: the same message). The counter is as big as the block size.
#:
#: Counters can be generated in several ways. The most straightforward one is
#: to choose an *initial counter block* (which can be made public, similarly
#: to the *IV* for the other modes) and increment its lowest **m** bits by
#: one (modulo *2^m*) for each block. In most cases, **m** is chosen to be half
#: the block size.
#:
#: Reusing the same *initial counter block* for encryptions done with the same
#: key leads to catastrophic cryptographic failures.
#:
#: See `NIST SP800-38A`_ , Section 6.5 (for the mode) and Appendix B (for how
#: to manage the *initial counter block*).
#:
#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf
MODE_CTR = 6
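# Hedged usage sketch (assumes the AES factory and `Crypto.Util.Counter`; the
# counter callable supplies one counter block per message block):
#
#     >>> from Crypto.Cipher import AES
#     >>> from Crypto.Util import Counter
#     >>> ctr = Counter.new(64, prefix=nonce)   # nonce: 8 random bytes
#     >>> cipher = AES.new(key, AES.MODE_CTR, counter=ctr)
#     >>> ct = cipher.encrypt(plaintext)        # any length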
#: OpenPGP. This mode is a variant of CFB, and it is only used in PGP and OpenPGP_ applications.
#: An Initialization Vector (*IV*) is required.
#:
#: Unlike CFB, the IV is not transmitted to the receiver. Instead, the *encrypted* IV is.
#: The IV is a random data block. Two of its bytes are duplicated to act as a checksum
#: for the correctness of the key. The encrypted IV is therefore 2 bytes longer than
#: the clean IV.
#:
#: .. _OpenPGP: http://tools.ietf.org/html/rfc4880
MODE_OPENPGP = 7
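# Hedged usage sketch (assumes the CAST factory, as used by PGP; on
# encryption the first returned chunk is prefixed with the encrypted IV,
# which is block_size+2 bytes long):
#
#     >>> from Crypto.Cipher import CAST
#     >>> cipher = CAST.new(key, CAST.MODE_OPENPGP, iv)   # iv: 8 random bytes
#     >>> ct = cipher.encrypt(plaintext)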
def _getParameter(name, index, args, kwargs, default=None):
"""Find a parameter in tuple and dictionary arguments a function receives"""
param = kwargs.get(name)
if len(args)>index:
if param:
raise ValueError("Parameter '%s' is specified twice" % name)
param = args[index]
return param or default
class BlockAlgo:
"""Class modelling an abstract block cipher."""
def __init__(self, factory, key, *args, **kwargs):
self.mode = _getParameter('mode', 0, args, kwargs, default=MODE_ECB)
self.block_size = factory.block_size
if self.mode != MODE_OPENPGP:
self._cipher = factory.new(key, *args, **kwargs)
self.IV = self._cipher.IV
else:
            # OPENPGP mode. For details, see section 13.9 of RFC 4880.
#
# A few members are specifically created for this mode:
# - _encrypted_iv, set in this constructor
# - _done_first_block, set to True after the first encryption
# - _done_last_block, set to True after a partial block is processed
self._done_first_block = False
self._done_last_block = False
self.IV = _getParameter('iv', 1, args, kwargs)
if not self.IV:
raise ValueError("MODE_OPENPGP requires an IV")
# Instantiate a temporary cipher to process the IV
IV_cipher = factory.new(key, MODE_CFB,
b('\x00')*self.block_size, # IV for CFB
segment_size=self.block_size*8)
# The cipher will be used for...
if len(self.IV) == self.block_size:
# ... encryption
self._encrypted_IV = IV_cipher.encrypt(
self.IV + self.IV[-2:] + # Plaintext
b('\x00')*(self.block_size-2) # Padding
)[:self.block_size+2]
elif len(self.IV) == self.block_size+2:
# ... decryption
self._encrypted_IV = self.IV
self.IV = IV_cipher.decrypt(self.IV + # Ciphertext
b('\x00')*(self.block_size-2) # Padding
)[:self.block_size+2]
if self.IV[-2:] != self.IV[-4:-2]:
raise ValueError("Failed integrity check for OPENPGP IV")
self.IV = self.IV[:-2]
else:
raise ValueError("Length of IV must be %d or %d bytes for MODE_OPENPGP"
% (self.block_size, self.block_size+2))
# Instantiate the cipher for the real PGP data
self._cipher = factory.new(key, MODE_CFB,
self._encrypted_IV[-self.block_size:],
segment_size=self.block_size*8)
def encrypt(self, plaintext):
"""Encrypt data with the key and the parameters set at initialization.
The cipher object is stateful; encryption of a long block
of data can be broken up in two or more calls to `encrypt()`.
That is, the statement:
>>> c.encrypt(a) + c.encrypt(b)
is always equivalent to:
>>> c.encrypt(a+b)
That also means that you cannot reuse an object for encrypting
or decrypting other data with the same key.
This function does not perform any padding.
- For `MODE_ECB`, `MODE_CBC`, and `MODE_OFB`, *plaintext* length
(in bytes) must be a multiple of *block_size*.
- For `MODE_CFB`, *plaintext* length (in bytes) must be a multiple
of *segment_size*/8.
- For `MODE_CTR`, *plaintext* can be of any length.
- For `MODE_OPENPGP`, *plaintext* must be a multiple of *block_size*,
unless it is the last chunk of the message.
:Parameters:
plaintext : byte string
The piece of data to encrypt.
:Return:
the encrypted data, as a byte string. It is as long as
*plaintext* with one exception: when encrypting the first message
            chunk with `MODE_OPENPGP`, the encrypted IV is prepended to the
returned ciphertext.
"""
if self.mode == MODE_OPENPGP:
padding_length = (self.block_size - len(plaintext) % self.block_size) % self.block_size
if padding_length>0:
# CFB mode requires ciphertext to have length multiple of block size,
# but PGP mode allows the last block to be shorter
if self._done_last_block:
raise ValueError("Only the last chunk is allowed to have length not multiple of %d bytes",
self.block_size)
self._done_last_block = True
padded = plaintext + b('\x00')*padding_length
res = self._cipher.encrypt(padded)[:len(plaintext)]
else:
res = self._cipher.encrypt(plaintext)
if not self._done_first_block:
res = self._encrypted_IV + res
self._done_first_block = True
return res
return self._cipher.encrypt(plaintext)
def decrypt(self, ciphertext):
"""Decrypt data with the key and the parameters set at initialization.
The cipher object is stateful; decryption of a long block
of data can be broken up in two or more calls to `decrypt()`.
That is, the statement:
>>> c.decrypt(a) + c.decrypt(b)
is always equivalent to:
>>> c.decrypt(a+b)
That also means that you cannot reuse an object for encrypting
or decrypting other data with the same key.
This function does not perform any padding.
- For `MODE_ECB`, `MODE_CBC`, and `MODE_OFB`, *ciphertext* length
(in bytes) must be a multiple of *block_size*.
- For `MODE_CFB`, *ciphertext* length (in bytes) must be a multiple
of *segment_size*/8.
- For `MODE_CTR`, *ciphertext* can be of any length.
        - For `MODE_OPENPGP`, *ciphertext* must be a multiple of *block_size*,
unless it is the last chunk of the message.
:Parameters:
ciphertext : byte string
The piece of data to decrypt.
:Return: the decrypted data (byte string, as long as *ciphertext*).
"""
if self.mode == MODE_OPENPGP:
padding_length = (self.block_size - len(ciphertext) % self.block_size) % self.block_size
if padding_length>0:
# CFB mode requires ciphertext to have length multiple of block size,
# but PGP mode allows the last block to be shorter
if self._done_last_block:
raise ValueError("Only the last chunk is allowed to have length not multiple of %d bytes",
self.block_size)
self._done_last_block = True
padded = ciphertext + b('\x00')*padding_length
res = self._cipher.decrypt(padded)[:len(ciphertext)]
else:
res = self._cipher.decrypt(ciphertext)
return res
return self._cipher.decrypt(ciphertext)
| bsd-3-clause | 4,329,177,523,181,962,000 | -5,513,316,978,591,554,000 | 41.554054 | 110 | 0.616466 | false |
betoesquivel/fil2014 | filenv/lib/python2.7/site-packages/south/management/commands/syncdb.py | 119 | 4643 | """
Overridden syncdb command
"""
from __future__ import print_function
import sys
from optparse import make_option
from django.core.management.base import NoArgsCommand, BaseCommand
from django.core.management.color import no_style
from django.utils.datastructures import SortedDict
from django.core.management.commands import syncdb
from django.conf import settings
from django.db import models
from django.db.models.loading import cache
from django.core import management
from south.db import dbs
from south import migration
from south.exceptions import NoMigrations
def get_app_label(app):
    return '.'.join(app.__name__.split('.')[:-1])
class Command(NoArgsCommand):
option_list = syncdb.Command.option_list + (
make_option('--migrate', action='store_true', dest='migrate', default=False,
help='Tells South to also perform migrations after the sync. Default for during testing, and other internal calls.'),
make_option('--all', action='store_true', dest='migrate_all', default=False,
help='Makes syncdb work on all apps, even migrated ones. Be careful!'),
)
if '--verbosity' not in [opt.get_opt_string() for opt in syncdb.Command.option_list]:
option_list += (
make_option('--verbosity', action='store', dest='verbosity', default='1',
type='choice', choices=['0', '1', '2'],
help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
)
help = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created, except those which use migrations."
def handle_noargs(self, migrate_all=False, **options):
# Import the 'management' module within each installed app, to register
# dispatcher events.
# This is copied from Django, to fix bug #511.
try:
from django.utils.importlib import import_module
except ImportError:
            pass  # TODO: Remove, only for Django 1.0
else:
for app_name in settings.INSTALLED_APPS:
try:
import_module('.management', app_name)
except ImportError as exc:
msg = exc.args[0]
if not msg.startswith('No module named') or 'management' not in msg:
raise
# Work out what uses migrations and so doesn't need syncing
apps_needing_sync = []
apps_migrated = []
for app in models.get_apps():
app_label = get_app_label(app)
if migrate_all:
apps_needing_sync.append(app_label)
else:
try:
migrations = migration.Migrations(app_label)
except NoMigrations:
# It needs syncing
apps_needing_sync.append(app_label)
else:
# This is a migrated app, leave it
apps_migrated.append(app_label)
verbosity = int(options.get('verbosity', 0))
# Run syncdb on only the ones needed
if verbosity:
print("Syncing...")
old_installed, settings.INSTALLED_APPS = settings.INSTALLED_APPS, apps_needing_sync
old_app_store, cache.app_store = cache.app_store, SortedDict([
(k, v) for (k, v) in cache.app_store.items()
if get_app_label(k) in apps_needing_sync
])
# This will allow the setting of the MySQL storage engine, for example.
for db in dbs.values():
db.connection_init()
# OK, run the actual syncdb
syncdb.Command().execute(**options)
settings.INSTALLED_APPS = old_installed
cache.app_store = old_app_store
# Migrate if needed
if options.get('migrate', True):
if verbosity:
print("Migrating...")
# convert from store_true to store_false
options['no_initial_data'] = not options.get('load_initial_data', True)
management.call_command('migrate', **options)
# Be obvious about what we did
if verbosity:
print("\nSynced:\n > %s" % "\n > ".join(apps_needing_sync))
if options.get('migrate', True):
if verbosity:
print("\nMigrated:\n - %s" % "\n - ".join(apps_migrated))
else:
if verbosity:
print("\nNot synced (use migrations):\n - %s" % "\n - ".join(apps_migrated))
print("(use ./manage.py migrate to migrate these)")
| mit | 6,222,776,795,528,823,000 | 348,893,978,310,661,900 | 39.373913 | 148 | 0.58669 | false |
RobertABT/heightmap | build/scipy/scipy/misc/tests/test_common.py | 9 | 2908 | from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import assert_array_equal, assert_almost_equal, \
assert_array_almost_equal, assert_equal
from scipy.misc import pade, logsumexp, face, ascent
def test_pade_trivial():
nump, denomp = pade([1.0], 0)
assert_array_equal(nump.c, [1.0])
assert_array_equal(denomp.c, [1.0])
def test_pade_4term_exp():
# First four Taylor coefficients of exp(x).
# Unlike poly1d, the first array element is the zero-order term.
an = [1.0, 1.0, 0.5, 1.0/6]
nump, denomp = pade(an, 0)
assert_array_almost_equal(nump.c, [1.0/6, 0.5, 1.0, 1.0])
assert_array_almost_equal(denomp.c, [1.0])
nump, denomp = pade(an, 1)
assert_array_almost_equal(nump.c, [1.0/6, 2.0/3, 1.0])
assert_array_almost_equal(denomp.c, [-1.0/3, 1.0])
nump, denomp = pade(an, 2)
assert_array_almost_equal(nump.c, [1.0/3, 1.0])
assert_array_almost_equal(denomp.c, [1.0/6, -2.0/3, 1.0])
nump, denomp = pade(an, 3)
assert_array_almost_equal(nump.c, [1.0])
assert_array_almost_equal(denomp.c, [-1.0/6, 0.5, -1.0, 1.0])
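# Hedged illustration (not part of the original tests): pade() returns two
# poly1d objects, so the approximant is evaluated as their ratio. The [2/1]
# approximant built above tracks exp() closely near zero:
#
#     >>> p, q = pade([1.0, 1.0, 0.5, 1.0/6], 1)
#     >>> abs(p(0.1)/q(0.1) - np.exp(0.1)) < 1e-5
#     True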
def test_logsumexp():
"""Test whether logsumexp() function correctly handles large inputs."""
a = np.arange(200)
desired = np.log(np.sum(np.exp(a)))
assert_almost_equal(logsumexp(a), desired)
# Now test with large numbers
b = [1000, 1000]
desired = 1000.0 + np.log(2.0)
assert_almost_equal(logsumexp(b), desired)
n = 1000
b = np.ones(n) * 10000
desired = 10000.0 + np.log(n)
assert_almost_equal(logsumexp(b), desired)
x = np.array([1e-40] * 1000000)
logx = np.log(x)
X = np.vstack([x, x])
logX = np.vstack([logx, logx])
assert_array_almost_equal(np.exp(logsumexp(logX)), X.sum())
assert_array_almost_equal(np.exp(logsumexp(logX, axis=0)), X.sum(axis=0))
assert_array_almost_equal(np.exp(logsumexp(logX, axis=1)), X.sum(axis=1))
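# Hedged reference sketch (not part of the original tests): the identity these
# tests exercise is logsumexp(a) = m + log(sum(exp(a - m))) with m = max(a),
# which keeps exp() from overflowing for large inputs:
#
#     >>> a = np.arange(200)
#     >>> m = a.max()
#     >>> np.allclose(logsumexp(a), m + np.log(np.sum(np.exp(a - m))))
#     True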
def test_logsumexp_b():
a = np.arange(200)
b = np.arange(200, 0, -1)
desired = np.log(np.sum(b*np.exp(a)))
assert_almost_equal(logsumexp(a, b=b), desired)
a = [1000, 1000]
b = [1.2, 1.2]
desired = 1000 + np.log(2 * 1.2)
assert_almost_equal(logsumexp(a, b=b), desired)
x = np.array([1e-40] * 100000)
    b = np.linspace(1, 1000, 100000)
logx = np.log(x)
X = np.vstack((x, x))
logX = np.vstack((logx, logx))
B = np.vstack((b, b))
assert_array_almost_equal(np.exp(logsumexp(logX, b=B)), (B * X).sum())
assert_array_almost_equal(np.exp(logsumexp(logX, b=B, axis=0)),
(B * X).sum(axis=0))
assert_array_almost_equal(np.exp(logsumexp(logX, b=B, axis=1)),
(B * X).sum(axis=1))
def test_face():
assert_equal(face().shape, (768, 1024, 3))
def test_ascent():
assert_equal(ascent().shape, (512, 512))
| mit | 2,186,588,647,507,376,400 | 5,176,856,154,682,617,000 | 29.93617 | 77 | 0.599037 | false |