repo_name | path | copies | size | content | license
---|---|---|---|---|---|
jordiclariana/ansible | contrib/inventory/zabbix.py | 86 | 4168 | #!/usr/bin/env python
# (c) 2013, Greg Buehler
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Zabbix Server external inventory script.
========================================
Returns hosts and hostgroups from Zabbix Server.
Configuration is read from `zabbix.ini`.
Tested with Zabbix Server 2.0.6.
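A minimal example `zabbix.ini` (the section and option names below are the ones
read_settings() looks for; the server URL and credentials are placeholders):
    [zabbix]
    server = http://zabbix.example.com/zabbix
    username = admin
    password = zabbix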
"""
from __future__ import print_function
import os, sys
import argparse
import ConfigParser
try:
from zabbix_api import ZabbixAPI
except ImportError:
print("Error: Zabbix API library must be installed: pip install zabbix-api.",
file=sys.stderr)
sys.exit(1)
try:
import json
except ImportError:
import simplejson as json
class ZabbixInventory(object):
def read_settings(self):
config = ConfigParser.SafeConfigParser()
conf_path = './zabbix.ini'
if not os.path.exists(conf_path):
conf_path = os.path.dirname(os.path.realpath(__file__)) + '/zabbix.ini'
if os.path.exists(conf_path):
config.read(conf_path)
# server
if config.has_option('zabbix', 'server'):
self.zabbix_server = config.get('zabbix', 'server')
# login
if config.has_option('zabbix', 'username'):
self.zabbix_username = config.get('zabbix', 'username')
if config.has_option('zabbix', 'password'):
self.zabbix_password = config.get('zabbix', 'password')
def read_cli(self):
parser = argparse.ArgumentParser()
parser.add_argument('--host')
parser.add_argument('--list', action='store_true')
self.options = parser.parse_args()
def hoststub(self):
return {
'hosts': []
}
def get_host(self, api, name):
data = {}
return data
def get_list(self, api):
hostsData = api.host.get({'output': 'extend', 'selectGroups': 'extend'})
data = {}
data[self.defaultgroup] = self.hoststub()
for host in hostsData:
hostname = host['name']
data[self.defaultgroup]['hosts'].append(hostname)
for group in host['groups']:
groupname = group['name']
if groupname not in data:
data[groupname] = self.hoststub()
data[groupname]['hosts'].append(hostname)
return data
def __init__(self):
self.defaultgroup = 'group_all'
self.zabbix_server = None
self.zabbix_username = None
self.zabbix_password = None
self.read_settings()
self.read_cli()
if self.zabbix_server and self.zabbix_username:
try:
api = ZabbixAPI(server=self.zabbix_server)
api.login(user=self.zabbix_username, password=self.zabbix_password)
except BaseException as e:
print("Error: Could not login to Zabbix server. Check your zabbix.ini.", file=sys.stderr)
sys.exit(1)
if self.options.host:
data = self.get_host(api, self.options.host)
print(json.dumps(data, indent=2))
elif self.options.list:
data = self.get_list(api)
print(json.dumps(data, indent=2))
else:
print("usage: --list ..OR.. --host <hostname>", file=sys.stderr)
sys.exit(1)
else:
print("Error: Configuration of server and credentials are required. See zabbix.ini.", file=sys.stderr)
sys.exit(1)
ZabbixInventory()
| gpl-3.0 |
ferreum/distanceutils | tests/test_workshoplevelinfos.py | 1 | 4185 | import unittest
from distance import WorkshopLevelInfos
from distance.printing import PrintContext
from . import common
from .common import check_exceptions
class Version0Test(unittest.TestCase):
LEVEL_IDS = [469806096, 822049253, 738529116, 753242700, 819617632, 837765551, 895852129, 920857185, 922165443, 923374136, 923919455, 925074578, 925807935, 927306728, 927891781, 864308595, 928965113, 928293077, 0, 727889718, 930822683, 931056317, 932950922, 932137893, 933651174, 934668868, 935484491, 936029646, 937387342, 942334134, 937961654, 938169628, 937865749, 939014598, 939810443, 939803443, 939807851, 939554130, 941191706]
LEVEL_NAMES = ['Lost Fortress', 'Main Menu Datastream', 'Linear Green', 'Impurity', 'Canyon Realm (Hot Wheels Acceleracers)', 'The-fall_00', 'Futuristic Highway - Tech District', 'SimpleRace?', 'Stretch Your Wings', '2caKe', 'Jaded Sphere', 'Corrupted Flight', 'Space menu', 'Spirit', 'Laserrush', 'Micro realm (hotwheels acceleracers)', 'Crash Corse MainMenu', 'Sacrifical', '', 'Egypt (Full)', 'Test Lab', 'Test-1A', 'Fog Realm', 'SpeedRun', 'Speed-1', 'Departure', 'Broken Road', 'Death', 'Absurdly Impractical Level', 'storm realm 2.0 (unanimated)', 'The Arctic', 'A Long Way Down', 'Cybergrid Realm (Hot Wheels Acceleracers)', 'Zedron Landing', 'Konna Mondaze', 'Fear', 'Sand', 'Skyline Realm', 'Recovering']
def test_version0(self):
infos = WorkshopLevelInfos("tests/in/workshoplevelinfos/version_0.bytes")
levels = infos.levels
self.assertEqual([l.id for l in levels],
Version0Test.LEVEL_IDS)
self.assertEqual([l.title for l in levels],
Version0Test.LEVEL_NAMES)
self.assertEqual([l.author for l in levels],
['Ferreus'] + ['Unknown'] * 38)
self.assertEqual([l.authorid for l in levels][:3],
[76561198040630941] * 2 + [76561198089234092])
self.assertEqual([l.published_by_user for l in levels],
[1] * 2 + [0] * 37)
self.assertEqual([l.tags for l in levels][:3],
['Level,Sprint,Advanced', 'Main Menu', 'Level,Sprint,Advanced'])
self.assertEqual([l.published_date for l in levels][:3],
[1435349535, 1482243138, 1470466984])
self.assertEqual([l.updated_date for l in levels][:3],
[1438108556, 1482711714, 1494893107])
self.assertEqual([len(l.description) for l in levels][:9],
[158, 33, 215, 39, 255, 26, 60, 124, 353])
self.assertEqual([l.description[:4] for l in levels][:2],
['A hi', 'Very'])
self.assertEqual([l.path for l in levels][:2],
['WorkshopLevels/76561198040630941/lost fortress.bytes',
'WorkshopLevels/76561198040630941/main menu datastream.bytes'])
self.assertEqual([l.upvotes for l in levels][:9],
[2273, 4, 92, 846, 758, 1, 39, 7, 5])
self.assertEqual([l.downvotes for l in levels][:9],
[227, 0, 23, 36, 66, 0, 9, 3, 5])
self.assertEqual([l.rating for l in levels][:9],
[1, 0, 0, 1, 2, 0, 0, 0, 1])
self.assertEqual(39, len(levels))
def test_truncated(self):
infos = WorkshopLevelInfos.maybe("tests/in/workshoplevelinfos/version_0_truncated.bytes")
self.assertRaises(EOFError, check_exceptions, infos)
def test_truncated_2(self):
infos = WorkshopLevelInfos.maybe("tests/in/workshoplevelinfos/version_0_truncated_2.bytes")
self.assertRaises(EOFError, check_exceptions, infos)
def test_print(self):
p = PrintContext.for_test()
obj = WorkshopLevelInfos("tests/in/workshoplevelinfos/version_0.bytes")
p.print_object(obj)
class Version0WriteReadTest(common.WriteReadTest):
filename = "tests/in/workshoplevelinfos/version_0.bytes"
read_obj = WorkshopLevelInfos
def verify_obj(self, obj):
self.assertEqual(obj.levels[-1].title, "Recovering")
# vim:set sw=4 ts=8 sts=4 et:
| mit |
CLVsol/odoo_addons | clv_medicament_dispensation_mng/clv_professional/__init__.py | 3 | 1428 | # -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
import clv_professional
| agpl-3.0 |
vanpact/scipy | scipy/optimize/_basinhopping.py | 50 | 26115 | """
basinhopping: The basinhopping global optimization algorithm
"""
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy import cos, sin
import scipy.optimize
import collections
__all__ = ['basinhopping']
class Storage(object):
"""
Class used to store the lowest energy structure
"""
def __init__(self, x, f):
self._add(x, f)
def _add(self, x, f):
self.x = np.copy(x)
self.f = f
def update(self, x, f):
if f < self.f:
self._add(x, f)
return True
else:
return False
def get_lowest(self):
return self.x, self.f
class BasinHoppingRunner(object):
"""This class implements the core of the basinhopping algorithm.
x0 : ndarray
The starting coordinates.
minimizer : callable
The local minimizer, with signature ``result = minimizer(x)``.
The return value is an `optimize.OptimizeResult` object.
step_taking : callable
This function displaces the coordinates randomly. Signature should
be ``x_new = step_taking(x)``. Note that `x` may be modified in-place.
accept_tests : list of callables
To each test is passed the kwargs `f_new`, `x_new`, `f_old` and
`x_old`. These tests will be used to judge whether or not to accept
the step. The acceptable return values are True, False, or ``"force
accept"``. If the latter, then this will override any other tests in
order to accept the step. This can be used, for example, to forcefully
escape from a local minimum that ``basinhopping`` is trapped in.
disp : bool, optional
Display status messages.
"""
def __init__(self, x0, minimizer, step_taking, accept_tests, disp=False):
self.x = np.copy(x0)
self.minimizer = minimizer
self.step_taking = step_taking
self.accept_tests = accept_tests
self.disp = disp
self.nstep = 0
# initialize return object
self.res = scipy.optimize.OptimizeResult()
self.res.minimization_failures = 0
# do initial minimization
minres = minimizer(self.x)
if not minres.success:
self.res.minimization_failures += 1
if self.disp:
print("warning: basinhopping: local minimization failure")
self.x = np.copy(minres.x)
self.energy = minres.fun
if self.disp:
print("basinhopping step %d: f %g" % (self.nstep, self.energy))
# initialize storage class
self.storage = Storage(self.x, self.energy)
if hasattr(minres, "nfev"):
self.res.nfev = minres.nfev
if hasattr(minres, "njev"):
self.res.njev = minres.njev
if hasattr(minres, "nhev"):
self.res.nhev = minres.nhev
def _monte_carlo_step(self):
"""Do one monte carlo iteration
Randomly displace the coordinates, minimize, and decide whether
or not to accept the new coordinates.
"""
# Take a random step. Make a copy of x because the step_taking
# algorithm might change x in place
x_after_step = np.copy(self.x)
x_after_step = self.step_taking(x_after_step)
# do a local minimization
minres = self.minimizer(x_after_step)
x_after_quench = minres.x
energy_after_quench = minres.fun
if not minres.success:
self.res.minimization_failures += 1
if self.disp:
print("warning: basinhopping: local minimization failure")
if hasattr(minres, "nfev"):
self.res.nfev += minres.nfev
if hasattr(minres, "njev"):
self.res.njev += minres.njev
if hasattr(minres, "nhev"):
self.res.nhev += minres.nhev
# accept the move based on self.accept_tests. If any test is false,
# then reject the step. If any test returns the special value, the
# string 'force accept', accept the step regardless. This can be used
# to forcefully escape from a local minimum if normal basin hopping
# steps are not sufficient.
accept = True
for test in self.accept_tests:
testres = test(f_new=energy_after_quench, x_new=x_after_quench,
f_old=self.energy, x_old=self.x)
if isinstance(testres, bool):
if not testres:
accept = False
elif isinstance(testres, str):
if testres == "force accept":
accept = True
break
else:
raise ValueError("accept test must return bool or string "
"'force accept'. Type is", type(testres))
else:
raise ValueError("accept test must return bool or string "
"'force accept'. Type is", type(testres))
# Report the result of the acceptance test to the take step class.
# This is for adaptive step taking
if hasattr(self.step_taking, "report"):
self.step_taking.report(accept, f_new=energy_after_quench,
x_new=x_after_quench, f_old=self.energy,
x_old=self.x)
return x_after_quench, energy_after_quench, accept
def one_cycle(self):
"""Do one cycle of the basinhopping algorithm
"""
self.nstep += 1
new_global_min = False
xtrial, energy_trial, accept = self._monte_carlo_step()
if accept:
self.energy = energy_trial
self.x = np.copy(xtrial)
new_global_min = self.storage.update(self.x, self.energy)
# print some information
if self.disp:
self.print_report(energy_trial, accept)
if new_global_min:
print("found new global minimum on step %d with function"
" value %g" % (self.nstep, self.energy))
# save some variables as BasinHoppingRunner attributes
self.xtrial = xtrial
self.energy_trial = energy_trial
self.accept = accept
return new_global_min
def print_report(self, energy_trial, accept):
"""print a status update"""
xlowest, energy_lowest = self.storage.get_lowest()
print("basinhopping step %d: f %g trial_f %g accepted %d "
" lowest_f %g" % (self.nstep, self.energy, energy_trial,
accept, energy_lowest))
class AdaptiveStepsize(object):
"""
Class to implement adaptive stepsize.
This class wraps the step taking class and modifies the stepsize to
ensure the true acceptance rate is as close as possible to the target.
Parameters
----------
takestep : callable
The step taking routine. Must contain modifiable attribute
takestep.stepsize
accept_rate : float, optional
The target step acceptance rate
interval : int, optional
Interval for how often to update the stepsize
factor : float, optional
The step size is multiplied or divided by this factor upon each
update.
verbose : bool, optional
Print information about each update
"""
def __init__(self, takestep, accept_rate=0.5, interval=50, factor=0.9,
verbose=True):
self.takestep = takestep
self.target_accept_rate = accept_rate
self.interval = interval
self.factor = factor
self.verbose = verbose
self.nstep = 0
self.nstep_tot = 0
self.naccept = 0
def __call__(self, x):
return self.take_step(x)
def _adjust_step_size(self):
old_stepsize = self.takestep.stepsize
accept_rate = float(self.naccept) / self.nstep
if accept_rate > self.target_accept_rate:
#We're accepting too many steps. This generally means we're
#trapped in a basin. Take bigger steps
self.takestep.stepsize /= self.factor
else:
#We're not accepting enough steps. Take smaller steps
self.takestep.stepsize *= self.factor
if self.verbose:
print("adaptive stepsize: acceptance rate %f target %f new "
"stepsize %g old stepsize %g" % (accept_rate,
self.target_accept_rate, self.takestep.stepsize,
old_stepsize))
def take_step(self, x):
self.nstep += 1
self.nstep_tot += 1
if self.nstep % self.interval == 0:
self._adjust_step_size()
return self.takestep(x)
def report(self, accept, **kwargs):
"called by basinhopping to report the result of the step"
if accept:
self.naccept += 1
class RandomDisplacement(object):
"""
Add a random displacement of maximum size, stepsize, to the coordinates
update x inplace
"""
def __init__(self, stepsize=0.5):
self.stepsize = stepsize
def __call__(self, x):
x += np.random.uniform(-self.stepsize, self.stepsize, np.shape(x))
return x
class MinimizerWrapper(object):
"""
wrap a minimizer function as a minimizer class
"""
def __init__(self, minimizer, func=None, **kwargs):
self.minimizer = minimizer
self.func = func
self.kwargs = kwargs
def __call__(self, x0):
if self.func is None:
return self.minimizer(x0, **self.kwargs)
else:
return self.minimizer(self.func, x0, **self.kwargs)
class Metropolis(object):
"""
Metropolis acceptance criterion
"""
def __init__(self, T):
self.beta = 1.0 / T
def accept_reject(self, energy_new, energy_old):
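# Metropolis rule: downhill moves (energy_new <= energy_old) give w >= 1 and are
# always accepted; uphill moves are accepted with probability
# exp(-(energy_new - energy_old) * beta), i.e. exp(-dE / T).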
w = min(1.0, np.exp(-(energy_new - energy_old) * self.beta))
rand = np.random.rand()
return w >= rand
def __call__(self, **kwargs):
"""
f_new and f_old are mandatory in kwargs
"""
return bool(self.accept_reject(kwargs["f_new"],
kwargs["f_old"]))
def basinhopping(func, x0, niter=100, T=1.0, stepsize=0.5,
minimizer_kwargs=None, take_step=None, accept_test=None,
callback=None, interval=50, disp=False, niter_success=None):
"""
Find the global minimum of a function using the basin-hopping algorithm
Parameters
----------
func : callable ``f(x, *args)``
Function to be optimized. ``args`` can be passed as an optional item
in the dict ``minimizer_kwargs``
x0 : ndarray
Initial guess.
niter : integer, optional
The number of basin hopping iterations
T : float, optional
The "temperature" parameter for the accept or reject criterion. Higher
"temperatures" mean that larger jumps in function value will be
accepted. For best results ``T`` should be comparable to the
separation
(in function value) between local minima.
stepsize : float, optional
initial step size for use in the random displacement.
minimizer_kwargs : dict, optional
Extra keyword arguments to be passed to the minimizer
``scipy.optimize.minimize()``. Some important options could be:
method : str
The minimization method (e.g. ``"L-BFGS-B"``)
args : tuple
Extra arguments passed to the objective function (``func``) and
its derivatives (Jacobian, Hessian).
take_step : callable ``take_step(x)``, optional
Replace the default step taking routine with this routine. The default
step taking routine is a random displacement of the coordinates, but
other step taking algorithms may be better for some systems.
``take_step`` can optionally have the attribute ``take_step.stepsize``.
If this attribute exists, then ``basinhopping`` will adjust
``take_step.stepsize`` in order to try to optimize the global minimum
search.
accept_test : callable, ``accept_test(f_new=f_new, x_new=x_new, f_old=f_old, x_old=x_old)``, optional
Define a test which will be used to judge whether or not to accept the
step. This will be used in addition to the Metropolis test based on
"temperature" ``T``. The acceptable return values are True,
False, or ``"force accept"``. If the latter, then this will
override any other tests in order to accept the step. This can be
used, for example, to forcefully escape from a local minimum that
``basinhopping`` is trapped in.
callback : callable, ``callback(x, f, accept)``, optional
A callback function which will be called for all minima found. ``x``
and ``f`` are the coordinates and function value of the trial minimum,
and ``accept`` is whether or not that minimum was accepted. This can be
used, for example, to save the lowest N minima found. Also,
``callback`` can be used to specify a user defined stop criterion by
optionally returning True to stop the ``basinhopping`` routine.
interval : integer, optional
interval for how often to update the ``stepsize``
disp : bool, optional
Set to True to print status messages
niter_success : integer, optional
Stop the run if the global minimum candidate remains the same for this
number of iterations.
Returns
-------
res : OptimizeResult
The optimization result represented as an ``OptimizeResult`` object. Important
attributes are: ``x`` the solution array, ``fun`` the value of the
function at the solution, and ``message`` which describes the cause of
the termination. See `OptimizeResult` for a description of other attributes.
See Also
--------
minimize :
The local minimization function called once for each basinhopping step.
``minimizer_kwargs`` is passed to this routine.
Notes
-----
Basin-hopping is a stochastic algorithm which attempts to find the global
minimum of a smooth scalar function of one or more variables [1]_ [2]_ [3]_
[4]_. The algorithm in its current form was described by David Wales and
Jonathan Doye [2]_ http://www-wales.ch.cam.ac.uk/.
The algorithm is iterative with each cycle composed of the following
features
1) random perturbation of the coordinates
2) local minimization
3) accept or reject the new coordinates based on the minimized function
value
The acceptance test used here is the Metropolis criterion of standard Monte
Carlo algorithms, although there are many other possibilities [3]_.
This global minimization method has been shown to be extremely efficient
for a wide variety of problems in physics and chemistry. It is
particularly useful when the function has many minima separated by large
barriers. See the Cambridge Cluster Database
http://www-wales.ch.cam.ac.uk/CCD.html for databases of molecular systems
that have been optimized primarily using basin-hopping. This database
includes minimization problems exceeding 300 degrees of freedom.
See the free software program GMIN (http://www-wales.ch.cam.ac.uk/GMIN) for
a Fortran implementation of basin-hopping. This implementation has many
different variations of the procedure described above, including more
advanced step taking algorithms and alternate acceptance criterion.
For stochastic global optimization there is no way to determine if the true
global minimum has actually been found. Instead, as a consistency check,
the algorithm can be run from a number of different random starting points
to ensure the lowest minimum found in each example has converged to the
global minimum. For this reason ``basinhopping`` will by default simply
run for the number of iterations ``niter`` and return the lowest minimum
found. It is left to the user to ensure that this is in fact the global
minimum.
Choosing ``stepsize``: This is a crucial parameter in ``basinhopping`` and
depends on the problem being solved. Ideally it should be comparable to
the typical separation between local minima of the function being
optimized. ``basinhopping`` will, by default, adjust ``stepsize`` to find
an optimal value, but this may take many iterations. You will get quicker
results if you set a sensible value for ``stepsize``.
Choosing ``T``: The parameter ``T`` is the temperature used in the
metropolis criterion. Basinhopping steps are accepted with probability
``1`` if ``func(xnew) < func(xold)``, or otherwise with probability::
exp( -(func(xnew) - func(xold)) / T )
So, for best results, ``T`` should be comparable to the typical
difference in function values between local minima.
.. versionadded:: 0.12.0
References
----------
.. [1] Wales, David J. 2003, Energy Landscapes, Cambridge University Press,
Cambridge, UK.
.. [2] Wales, D J, and Doye J P K, Global Optimization by Basin-Hopping and
the Lowest Energy Structures of Lennard-Jones Clusters Containing up to
110 Atoms. Journal of Physical Chemistry A, 1997, 101, 5111.
.. [3] Li, Z. and Scheraga, H. A., Monte Carlo-minimization approach to the
multiple-minima problem in protein folding, Proc. Natl. Acad. Sci. USA,
1987, 84, 6611.
.. [4] Wales, D. J. and Scheraga, H. A., Global optimization of clusters,
crystals, and biomolecules, Science, 1999, 285, 1368.
Examples
--------
The following example is a one-dimensional minimization problem, with many
local minima superimposed on a parabola.
>>> from scipy.optimize import basinhopping
>>> func = lambda x: np.cos(14.5 * x - 0.3) + (x + 0.2) * x
>>> x0=[1.]
Basinhopping, internally, uses a local minimization algorithm. We will use
the parameter ``minimizer_kwargs`` to tell basinhopping which algorithm to
use and how to set up that minimizer. This parameter will be passed to
``scipy.optimize.minimize()``.
>>> minimizer_kwargs = {"method": "BFGS"}
>>> ret = basinhopping(func, x0, minimizer_kwargs=minimizer_kwargs,
... niter=200)
>>> print("global minimum: x = %.4f, f(x0) = %.4f" % (ret.x, ret.fun))
global minimum: x = -0.1951, f(x0) = -1.0009
Next consider a two-dimensional minimization problem. Also, this time we
will use gradient information to significantly speed up the search.
>>> def func2d(x):
... f = np.cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] +
... 0.2) * x[0]
... df = np.zeros(2)
... df[0] = -14.5 * np.sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2
... df[1] = 2. * x[1] + 0.2
... return f, df
We'll also use a different local minimization algorithm. Also we must tell
the minimizer that our function returns both energy and gradient (jacobian)
>>> minimizer_kwargs = {"method":"L-BFGS-B", "jac":True}
>>> x0 = [1.0, 1.0]
>>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
... niter=200)
>>> print("global minimum: x = [%.4f, %.4f], f(x0) = %.4f" % (ret.x[0],
... ret.x[1],
... ret.fun))
global minimum: x = [-0.1951, -0.1000], f(x0) = -1.0109
Here is an example using a custom step taking routine. Imagine you want
the first coordinate to take larger steps than the rest of the coordinates.
This can be implemented like so:
>>> class MyTakeStep(object):
... def __init__(self, stepsize=0.5):
... self.stepsize = stepsize
... def __call__(self, x):
... s = self.stepsize
... x[0] += np.random.uniform(-2.*s, 2.*s)
... x[1:] += np.random.uniform(-s, s, x[1:].shape)
... return x
Since ``MyTakeStep.stepsize`` exists basinhopping will adjust the magnitude
of ``stepsize`` to optimize the search. We'll use the same 2-D function as
before
>>> mytakestep = MyTakeStep()
>>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
... niter=200, take_step=mytakestep)
>>> print("global minimum: x = [%.4f, %.4f], f(x0) = %.4f" % (ret.x[0],
... ret.x[1],
... ret.fun))
global minimum: x = [-0.1951, -0.1000], f(x0) = -1.0109
Now let's do an example using a custom callback function which prints the
value of every minimum found
>>> def print_fun(x, f, accepted):
... print("at minimum %.4f accepted %d" % (f, int(accepted)))
We'll run it for only 10 basinhopping steps this time.
>>> np.random.seed(1)
>>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
... niter=10, callback=print_fun)
at minimum 0.4159 accepted 1
at minimum -0.9073 accepted 1
at minimum -0.1021 accepted 1
at minimum -0.1021 accepted 1
at minimum 0.9102 accepted 1
at minimum 0.9102 accepted 1
at minimum 2.2945 accepted 0
at minimum -0.1021 accepted 1
at minimum -1.0109 accepted 1
at minimum -1.0109 accepted 1
The minimum at -1.0109 is actually the global minimum, found already on the
8th iteration.
Now let's implement bounds on the problem using a custom ``accept_test``:
>>> class MyBounds(object):
... def __init__(self, xmax=[1.1,1.1], xmin=[-1.1,-1.1] ):
... self.xmax = np.array(xmax)
... self.xmin = np.array(xmin)
... def __call__(self, **kwargs):
... x = kwargs["x_new"]
... tmax = bool(np.all(x <= self.xmax))
... tmin = bool(np.all(x >= self.xmin))
... return tmax and tmin
>>> mybounds = MyBounds()
>>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
... niter=10, accept_test=mybounds)
"""
x0 = np.array(x0)
# set up minimizer
if minimizer_kwargs is None:
minimizer_kwargs = dict()
wrapped_minimizer = MinimizerWrapper(scipy.optimize.minimize, func,
**minimizer_kwargs)
# set up step taking algorithm
if take_step is not None:
if not isinstance(take_step, collections.Callable):
raise TypeError("take_step must be callable")
# if take_step.stepsize exists then use AdaptiveStepsize to control
# take_step.stepsize
if hasattr(take_step, "stepsize"):
take_step_wrapped = AdaptiveStepsize(take_step, interval=interval,
verbose=disp)
else:
take_step_wrapped = take_step
else:
# use default
displace = RandomDisplacement(stepsize=stepsize)
take_step_wrapped = AdaptiveStepsize(displace, interval=interval,
verbose=disp)
# set up accept tests
if accept_test is not None:
if not isinstance(accept_test, collections.Callable):
raise TypeError("accept_test must be callable")
accept_tests = [accept_test]
else:
accept_tests = []
# use default
metropolis = Metropolis(T)
accept_tests.append(metropolis)
if niter_success is None:
niter_success = niter + 2
bh = BasinHoppingRunner(x0, wrapped_minimizer, take_step_wrapped,
accept_tests, disp=disp)
# start main iteration loop
count = 0
message = ["requested number of basinhopping iterations completed"
" successfully"]
for i in range(niter):
new_global_min = bh.one_cycle()
if isinstance(callback, collections.Callable):
# should we pass a copy of x?
val = callback(bh.xtrial, bh.energy_trial, bh.accept)
if val is not None:
if val:
message = ["callback function requested stop early by"
"returning True"]
break
count += 1
if new_global_min:
count = 0
elif count > niter_success:
message = ["success condition satisfied"]
break
# prepare return object
lowest = bh.storage.get_lowest()
res = bh.res
res.x = np.copy(lowest[0])
res.fun = lowest[1]
res.message = message
res.nit = i + 1
return res
def _test_func2d_nograd(x):
f = (cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + 0.2) * x[0]
+ 1.010876184442655)
return f
def _test_func2d(x):
f = (cos(14.5 * x[0] - 0.3) + (x[0] + 0.2) * x[0] + cos(14.5 * x[1] -
0.3) + (x[1] + 0.2) * x[1] + x[0] * x[1] + 1.963879482144252)
df = np.zeros(2)
df[0] = -14.5 * sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2 + x[1]
df[1] = -14.5 * sin(14.5 * x[1] - 0.3) + 2. * x[1] + 0.2 + x[0]
return f, df
if __name__ == "__main__":
print("\n\nminimize a 2d function without gradient")
# minimum expected at ~[-0.195, -0.1]
kwargs = {"method": "L-BFGS-B"}
x0 = np.array([1.0, 1.])
scipy.optimize.minimize(_test_func2d_nograd, x0, **kwargs)
ret = basinhopping(_test_func2d_nograd, x0, minimizer_kwargs=kwargs,
niter=200, disp=False)
print("minimum expected at func([-0.195, -0.1]) = 0.0")
print(ret)
print("\n\ntry a harder 2d problem")
kwargs = {"method": "L-BFGS-B", "jac": True}
x0 = np.array([1.0, 1.0])
ret = basinhopping(_test_func2d, x0, minimizer_kwargs=kwargs, niter=200,
disp=False)
print("minimum expected at ~, func([-0.19415263, -0.19415263]) = 0")
print(ret)
| bsd-3-clause |
seekuoitdc/clubcheckout | workingdemo/basic/3server.py | 1 | 4657 | from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from os import curdir, sep
import urlparse, pickle, os
p = 'htmlsnips/'
nofile = 'Bud, I can\'t find the file.'
# registered users (email -> credentials), shared by makeTable() and the POST handler
oldusers = {}
def makePage():
if not os.path.isfile('login.html'):
try:
with open(p+'header.xml', 'r') as h, open(p+'login.xml','r') as i, open(p+'login.html', 'w') as o:
page = h.read() + i.read() + '</html>'
o.write(page)
except IOError:
print nofile
if not os.path.isfile('register.html'):
try:
with open(p+'header.xml', 'r') as h, open(p+'register.xml','r') as i, open(p+'register.html', 'w') as o:
page = h.read() + i.read() + '</html>'
o.write(page)
except IOError:
print nofile
def makeTable(itemname='',loginuser=''):
items = []
page = ""
try:
with open('clubinventory/seekequipment.pickle') as f:
items = pickle.load(f)
except:
pass
try:
with open("htmlsnips/header.xml", 'rU') as f:
page += f.read()
except:
pass
try:
with open('users/oldusers.pickle') as f:
oldusers.update(pickle.load(f))
except:
print "oldusers is not there in file"
page += """
<body>
<h1>SEEK Hardware</h1>
<p>Software and Electrical Engineering Klub hardware for members.</p>
<p><a href="/login/">Log In</a> or <a href="/register/">Register</a> to sign out hardware on weekly loans.</p>
"""
page += """
<table>
"""
row = 0
for listi in items:
counter = 0
#print "in"
row += 1
if row > 1:
page += """
</tr>
"""
page += "<tr>"
listilength = len(listi)
founditem = False
##print "len(listi): ", listilength
firsti = ''
for i in listi:
if counter == 0:
firsti = i.lower()
counter += 1
##print counter
if firsti == itemname and loginuser != '':
founditem = True
##print founditem
if counter == listilength:
if founditem:
rowtodec = items.index(listi)
celltodec = listi.index(i)
#print "not yet decremented item in row ", rowtodec, " cell ", celltodec
num = int(items[rowtodec][celltodec])
num -= 1
items[rowtodec][celltodec] = num
page += "<td>" + str(num) + "</td>"
else:
page += "<td>" + str(i) + "</td>"
if loginuser != '':
page += '<td><form method="POST"><input type="radio" name="itemtosignout"'+' value="'+firsti+'"'+'></input><input type="submit" value="Sign Out"></input></form></td>'
else:
page += "<td>" + str(i) + "</td>"
page += "</tr>\n</table>"
page += "</body>\n</html>"
try:
with open("index.html", 'w') as o:
o.write(page)
except:
pass
try:
with open('clubinventory/seekequipment.pickle', 'wb') as o:
pickle.dump(items, o)
except:
pass
class myHandler(BaseHTTPRequestHandler):
def do_GET(self):
if self.path == "/":
makeTable()
self.path = "/index.html"
elif self.path == "/login":
makeTable()
self.path = "/login/index.html"
elif self.path == "/register":
makeTable()
self.path = "/register/index.html"
sendreply = False
try:
if self.path.endswith(".html"):
mimetype = 'text/html'
sendreply = True
if (sendreply == True):
f = open(curdir + sep + self.path)
self.send_response(200)
self.send_header('Content-type',mimetype)
self.end_headers()
self.wfile.write(f.read())
f.close()
return
except IOError:
self.send_error(404, 'File not found: %s' % self.path)
def do_POST(self):
"""Handler for POST requests"""
length = int(self.headers['Content-Length'])
post_data = urlparse.parse_qs(self.rfile.read(length).decode('utf-8'))
try:
whotodecrement = post_data['itemtosignout'][0]
except KeyError:
whotodecrement = ''
makeTable(whotodecrement)
try:
registeruser = post_data['register'][0]
registeruser = registeruser.lower()
if registeruser.endswith('@uoit.net') or registeruser.endswith('@uoit.ca'):
if not registeruser in oldusers:
oldusers[registeruser] = ['abcd']
except KeyError:
pass
try:
loginuser = post_data['login'][0]
password = post_data['login'][1]
if loginuser.endswith('@uoit.net') or loginuser.endswith('@uoit.ca'):
if loginuser in oldusers:
makeTable('',loginuser)
except:
pass
if self.path == "/":
self.path = "/index.html"
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
f = open(curdir + sep + self.path)
self.wfile.write(f.read())
f.close()
SERVER_LOCATION = '0.0.0.0'
PORT_NUMBER = 8080
try:
server = HTTPServer((SERVER_LOCATION,PORT_NUMBER),myHandler)
print "Started HTTP server at ", SERVER_LOCATION, " port ", PORT_NUMBER
server.serve_forever()
except KeyboardInterrupt:
print "\nKill signal received, server shutting down"
server.socket.close()
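# Usage sketch (assumes the htmlsnips/, clubinventory/ and users/ files referenced
# above are present): run "python 3server.py" and browse to http://<host>:8080/ ;
# each GET of "/" regenerates index.html from the pickled inventory before serving it.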
| mit |
pincopallino93/rdfendpoints | lib/rdflib/plugins/parsers/nquads.py | 20 | 3055 | """
This is a rdflib plugin for parsing NQuad files into Conjunctive
graphs that can be used and queried. The store that backs the graph
*must* be able to handle contexts.
>>> from rdflib import ConjunctiveGraph, URIRef, Namespace
>>> g = ConjunctiveGraph()
>>> data = open("test/nquads.rdflib/example.nquads", "rb")
>>> g.parse(data, format="nquads") # doctest:+ELLIPSIS
<Graph identifier=... (<class 'rdflib.graph.Graph'>)>
>>> assert len(g.store) == 449
>>> # There should be 16 separate contexts
>>> assert len([x for x in g.store.contexts()]) == 16
>>> # is the name of entity E10009 "Arco Publications"?
>>> # (in graph http://bibliographica.org/entity/E10009)
>>> # Looking for:
>>> # <http://bibliographica.org/entity/E10009>
>>> # <http://xmlns.com/foaf/0.1/name>
>>> # "Arco Publications"
>>> # <http://bibliographica.org/entity/E10009>
>>> s = URIRef("http://bibliographica.org/entity/E10009")
>>> FOAF = Namespace("http://xmlns.com/foaf/0.1/")
>>> assert(g.value(s, FOAF.name).eq("Arco Publications"))
"""
from codecs import getreader
from rdflib.py3compat import b
from rdflib import ConjunctiveGraph
# Build up from the NTriples parser:
from rdflib.plugins.parsers.ntriples import NTriplesParser
from rdflib.plugins.parsers.ntriples import ParseError
from rdflib.plugins.parsers.ntriples import r_tail
from rdflib.plugins.parsers.ntriples import r_wspace
from rdflib.plugins.parsers.ntriples import r_wspaces
__all__ = ['NQuadsParser']
class NQuadsParser(NTriplesParser):
def parse(self, inputsource, sink, **kwargs):
"""Parse f as an N-Triples file."""
assert sink.store.context_aware, ("NQuadsParser must be given"
" a context aware store.")
self.sink = ConjunctiveGraph(store=sink.store)
source = inputsource.getByteStream()
if not hasattr(source, 'read'):
raise ParseError("Item to parse must be a file-like object.")
source = getreader('utf-8')(source)
self.file = source
self.buffer = ''
while True:
self.line = __line = self.readline()
if self.line is None:
break
try:
self.parseline()
except ParseError, msg:
raise ParseError("Invalid line (%s):\n%r" % (msg, __line))
return self.sink
def parseline(self):
self.eat(r_wspace)
if (not self.line) or self.line.startswith(('#')):
return # The line is empty or a comment
subject = self.subject()
self.eat(r_wspace)
predicate = self.predicate()
self.eat(r_wspace)
obj = self.object()
self.eat(r_wspace)
context = self.uriref() or self.nodeid()
self.eat(r_tail)
if self.line:
raise ParseError("Trailing garbage")
# Must have a context aware store - add on a normal Graph
# discards anything where the ctx != graph.identifier
self.sink.get_context(context).add((subject, predicate, obj))
| apache-2.0 |
IONISx/edx-platform | common/lib/xmodule/xmodule/modulestore/split_mongo/id_manager.py | 189 | 1433 | """
An implementation of IdReader and IdGenerator that manages ids for the SplitMongo storage
mechanism.
"""
from opaque_keys.edx.locator import LocalId, DefinitionLocator
from xmodule.x_module import OpaqueKeyReader, AsideKeyGenerator
from xmodule.modulestore.split_mongo import BlockKey
# TODO: Migrate split_mongo to use this class for all key mapping/creation.
class SplitMongoIdManager(OpaqueKeyReader, AsideKeyGenerator): # pylint: disable=abstract-method
"""
An IdManager that knows how to retrieve the DefinitionLocator, given
a usage_id and a :class:`.CachingDescriptorSystem`.
"""
def __init__(self, caching_descriptor_system):
self._cds = caching_descriptor_system
def get_definition_id(self, usage_id):
if isinstance(usage_id.block_id, LocalId):
# a LocalId indicates that this block hasn't been persisted yet, and is instead stored
# in-memory in the local_modules dictionary.
return self._cds.local_modules[usage_id].scope_ids.def_id
else:
block_key = BlockKey.from_usage_key(usage_id)
module_data = self._cds.get_module_data(block_key, usage_id.course_key)
if module_data.definition is not None:
return DefinitionLocator(usage_id.block_type, module_data.definition)
else:
raise ValueError("All non-local blocks should have a definition specified")
| agpl-3.0 |
citrix-openstack-build/heat | heat/openstack/common/rpc/impl_fake.py | 5 | 5870 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Fake RPC implementation which calls proxy methods directly with no
queues. Casts will block, but this is very useful for tests.
"""
import inspect
# NOTE(russellb): We specifically want to use json, not our own jsonutils.
# jsonutils has some extra logic to automatically convert objects to primitive
# types so that they can be serialized. We want to catch all cases where
# non-primitive types make it into this code and treat it as an error.
import json
import time
import eventlet
from heat.openstack.common.rpc import common as rpc_common
CONSUMERS = {}
class RpcContext(rpc_common.CommonRpcContext):
def __init__(self, **kwargs):
super(RpcContext, self).__init__(**kwargs)
self._response = []
self._done = False
def deepcopy(self):
values = self.to_dict()
new_inst = self.__class__(**values)
new_inst._response = self._response
new_inst._done = self._done
return new_inst
def reply(self, reply=None, failure=None, ending=False):
if ending:
self._done = True
if not self._done:
self._response.append((reply, failure))
class Consumer(object):
def __init__(self, topic, proxy):
self.topic = topic
self.proxy = proxy
def call(self, context, version, method, namespace, args, timeout):
done = eventlet.event.Event()
def _inner():
ctxt = RpcContext.from_dict(context.to_dict())
try:
rval = self.proxy.dispatch(context, version, method,
namespace, **args)
res = []
# Caller might have called ctxt.reply() manually
for (reply, failure) in ctxt._response:
if failure:
raise failure[0], failure[1], failure[2]
res.append(reply)
# if ending not 'sent'...we might have more data to
# return from the function itself
if not ctxt._done:
if inspect.isgenerator(rval):
for val in rval:
res.append(val)
else:
res.append(rval)
done.send(res)
except rpc_common.ClientException as e:
done.send_exception(e._exc_info[1])
except Exception as e:
done.send_exception(e)
thread = eventlet.greenthread.spawn(_inner)
if timeout:
start_time = time.time()
while not done.ready():
eventlet.greenthread.sleep(1)
cur_time = time.time()
if (cur_time - start_time) > timeout:
thread.kill()
raise rpc_common.Timeout()
return done.wait()
class Connection(object):
"""Connection object."""
def __init__(self):
self.consumers = []
def create_consumer(self, topic, proxy, fanout=False):
consumer = Consumer(topic, proxy)
self.consumers.append(consumer)
if topic not in CONSUMERS:
CONSUMERS[topic] = []
CONSUMERS[topic].append(consumer)
def close(self):
for consumer in self.consumers:
CONSUMERS[consumer.topic].remove(consumer)
self.consumers = []
def consume_in_thread(self):
pass
def create_connection(conf, new=True):
"""Create a connection."""
return Connection()
def check_serialize(msg):
"""Make sure a message intended for rpc can be serialized."""
json.dumps(msg)
def multicall(conf, context, topic, msg, timeout=None):
"""Make a call that returns multiple times."""
check_serialize(msg)
method = msg.get('method')
if not method:
return
args = msg.get('args', {})
version = msg.get('version', None)
namespace = msg.get('namespace', None)
try:
consumer = CONSUMERS[topic][0]
except (KeyError, IndexError):
raise rpc_common.Timeout("No consumers available")
else:
return consumer.call(context, version, method, namespace, args,
timeout)
def call(conf, context, topic, msg, timeout=None):
"""Sends a message on a topic and wait for a response."""
rv = multicall(conf, context, topic, msg, timeout)
# NOTE(vish): return the last result from the multicall
rv = list(rv)
if not rv:
return
return rv[-1]
def cast(conf, context, topic, msg):
check_serialize(msg)
try:
call(conf, context, topic, msg)
except Exception:
pass
def notify(conf, context, topic, msg, envelope):
check_serialize(msg)
def cleanup():
pass
def fanout_cast(conf, context, topic, msg):
"""Cast to all consumers of a topic."""
check_serialize(msg)
method = msg.get('method')
if not method:
return
args = msg.get('args', {})
version = msg.get('version', None)
namespace = msg.get('namespace', None)
for consumer in CONSUMERS.get(topic, []):
try:
consumer.call(context, version, method, namespace, args, None)
except Exception:
pass
| apache-2.0 |
indictranstech/ebuy-now-frappe | frappe/website/doctype/web_page/web_page.py | 4 | 8841 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, re, os, json, imp
import requests, requests.exceptions
from frappe.utils import strip_html
from frappe.website.website_generator import WebsiteGenerator
from frappe.website.router import resolve_route
from frappe.website.doctype.website_slideshow.website_slideshow import get_slideshow
from frappe.website.utils import find_first_image, get_comment_list, get_full_index
from markdown2 import markdown
from frappe.utils.jinja import render_template
from jinja2.exceptions import TemplateSyntaxError
from frappe import _
class WebPage(WebsiteGenerator):
save_versions = True
website = frappe._dict(
template = "templates/generators/web_page.html",
condition_field = "published",
page_title_field = "title",
parent_website_route_field = "parent_web_page"
)
def get_feed(self):
return self.title
def validate(self):
if self.template_path and not getattr(self, "from_website_sync"):
frappe.throw(frappe._("Cannot edit templated page"))
# avoid recursive parent_web_page.
if self.parent_web_page == self.page_name:
self.parent_web_page = ""
self.parent_website_route = ""
super(WebPage, self).validate()
def get_context(self, context):
# if static page, get static content
if context.slideshow:
context.update(get_slideshow(self))
if self.enable_comments:
context.comment_list = get_comment_list(self.doctype, self.name)
# for sidebar and breadcrumbs
context.children = self.get_children()
context.parents = self.get_parents(context)
if self.template_path:
# render dynamic context (if .py file exists)
# get absolute template path considering first fragment as app name
split_path = self.template_path.split(os.sep)
self.template_path = os.path.join(frappe.get_app_path(split_path[0]), *split_path[1:])
context = self.get_dynamic_context(frappe._dict(context))
# load content from template
self.get_static_content(context)
else:
context.update({
"style": self.css or "",
"script": self.javascript or "",
"header": self.header,
"title": self.title,
"text_align": self.text_align,
})
if self.description:
context.setdefault("metatags", {})["description"] = self.description
if not self.show_title:
context["no_header"] = 1
self.set_metatags(context)
self.set_breadcrumbs(context)
self.set_title_and_header(context)
self.add_index(context)
return context
def render_dynamic(self, context):
# dynamic
is_jinja = "<!-- jinja -->" in context.main_section
if is_jinja or ("{{" in context.main_section):
try:
context["main_section"] = render_template(context.main_section,
context)
if not "<!-- static -->" in context.main_section:
context["no_cache"] = 1
except TemplateSyntaxError:
if is_jinja:
raise
def set_breadcrumbs(self, context):
"""Build breadcrumbs template (deprecated)"""
if not "no_breadcrumbs" in context:
if "<!-- no-breadcrumbs -->" in context.main_section:
context.no_breadcrumbs = 1
def set_title_and_header(self, context):
"""Extract and set title and header from content or context."""
if not "no_header" in context:
if "<!-- no-header -->" in context.main_section:
context.no_header = 1
if "<!-- title:" in context.main_section:
context.title = re.findall('<!-- title:([^>]*) -->', context.main_section)[0].strip()
if context.get("page_titles") and context.page_titles.get(context.pathname):
context.title = context.page_titles.get(context.pathname)[0]
# header
if context.no_header and "header" in context:
context.header = ""
if not context.no_header:
# if header not set and no h1 tag in the body, set header as title
if not context.header and "<h1" not in context.main_section:
context.header = context.title
# add h1 tag to header
if context.get("header") and not re.findall("<h.>", context.header):
context.header = "<h1>" + context.header + "</h1>"
# if title not set, set title from header
if not context.title and context.header:
context.title = strip_html(context.header)
def add_index(self, context):
"""Add index, next button if `{index}`, `{next}` is present."""
# table of contents
extn = ""
if context.page_links_with_extn:
extn = ".html"
if "{index}" in context.main_section and context.get("children") and len(context.children):
full_index = get_full_index(context.pathname, extn = extn)
if full_index:
html = frappe.get_template("templates/includes/full_index.html").render({
"full_index": full_index,
"url_prefix": context.url_prefix
})
context.main_section = context.main_section.replace("{index}", html)
# next and previous
if "{next}" in context.main_section:
next_item = self.get_next()
next_item.extn = "" if self.has_children(next_item.name) else extn
if next_item and next_item.page_name:
if context.relative_links:
if next_item.next_parent:
next_item.name = "../" + next_item.page_name or ""
else:
next_item.name = next_item.page_name or ""
else:
if next_item and next_item.name and next_item.name[0]!="/":
next_item.name = "/" + next_item.name
if not next_item.title:
next_item.title = ""
html = ('<p class="btn-next-wrapper">'+_("Next")\
+': <a class="btn-next" href="{name}{extn}">{title}</a></p>').format(**next_item)
else:
html = ""
context.main_section = context.main_section.replace("{next}", html)
def add_hero(self, context):
"""Add a hero element if specified in content or hooks.
Hero elements get full page width."""
context.hero = ""
if "<!-- start-hero -->" in context.main_section:
parts1 = context.main_section.split("<!-- start-hero -->")
parts2 = parts1[1].split("<!-- end-hero -->")
context.main_section = parts1[0] + parts2[1]
context.hero = parts2[0]
def get_static_content(self, context):
with open(self.template_path, "r") as contentfile:
content = unicode(contentfile.read(), 'utf-8')
if self.template_path.endswith(".md"):
if content:
lines = content.splitlines()
first_line = lines[0].strip()
if first_line.startswith("# "):
context.title = first_line[2:]
content = "\n".join(lines[1:])
content = markdown(content)
context.main_section = unicode(content.encode("utf-8"), 'utf-8')
self.check_for_redirect(context)
if not context.title:
context.title = self.name.replace("-", " ").replace("_", " ").title()
self.render_dynamic(context)
for extn in ("js", "css"):
fpath = self.template_path.rsplit(".", 1)[0] + "." + extn
if os.path.exists(fpath):
with open(fpath, "r") as f:
context["style" if extn=="css" else "script"] = f.read()
def check_for_redirect(self, context):
if "<!-- redirect:" in context.main_section:
frappe.local.flags.redirect_location = \
context.main_section.split("<!-- redirect:")[1].split("-->")[0].strip()
raise frappe.Redirect
def get_dynamic_context(self, context):
"update context from `.py` and load sidebar from `_sidebar.json` if either exists"
basename = os.path.basename(self.template_path).rsplit(".", 1)[0]
module_path = os.path.join(os.path.dirname(self.template_path),
frappe.scrub(basename) + ".py")
if os.path.exists(module_path):
module = imp.load_source(basename, module_path)
if hasattr(module, "get_context"):
ret = module.get_context(context)
if ret:
context = ret
# sidebar?
sidebar_path = os.path.join(os.path.dirname(self.template_path), "_sidebar.json")
if os.path.exists(sidebar_path):
with open(sidebar_path, "r") as f:
context.children = json.loads(f.read())
return context
def set_metatags(self, context):
context.metatags = {
"name": context.title,
"description": (context.description or "").replace("\n", " ")[:500]
}
image = find_first_image(context.main_section or "")
if image:
context.metatags["image"] = image
def check_broken_links():
cnt = 0
for p in frappe.db.sql("select name, main_section from `tabWeb Page`", as_dict=True):
for link in re.findall('href=["\']([^"\']*)["\']', p.main_section):
if link.startswith("http"):
try:
res = requests.get(link)
except requests.exceptions.SSLError:
res = frappe._dict({"status_code": "SSL Error"})
except requests.exceptions.ConnectionError:
res = frappe._dict({"status_code": "Connection Error"})
if res.status_code!=200:
print "[{0}] {1}: {2}".format(res.status_code, p.name, link)
cnt += 1
else:
link = link[1:] # remove leading /
link = link.split("#")[0]
if not resolve_route(link):
print p.name + ":" + link
cnt += 1
print "{0} links broken".format(cnt)
| mit |
oscarolar/odoo | addons/account/report/account_general_ledger.py | 183 | 15416 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2005-2006 CamptoCamp
# Copyright (c) 2006-2010 OpenERP S.A
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
import time
from openerp.osv import osv
from openerp.report import report_sxw
from common_report_header import common_report_header
class general_ledger(report_sxw.rml_parse, common_report_header):
_name = 'report.account.general.ledger'
def set_context(self, objects, data, ids, report_type=None):
new_ids = ids
obj_move = self.pool.get('account.move.line')
self.sortby = data['form'].get('sortby', 'sort_date')
self.query = obj_move._query_get(self.cr, self.uid, obj='l', context=data['form'].get('used_context',{}))
ctx2 = data['form'].get('used_context',{}).copy()
self.init_balance = data['form'].get('initial_balance', True)
if self.init_balance:
ctx2.update({'initial_bal': True})
self.init_query = obj_move._query_get(self.cr, self.uid, obj='l', context=ctx2)
self.display_account = data['form']['display_account']
self.target_move = data['form'].get('target_move', 'all')
ctx = self.context.copy()
ctx['fiscalyear'] = data['form']['fiscalyear_id']
if data['form']['filter'] == 'filter_period':
ctx['periods'] = data['form']['periods']
elif data['form']['filter'] == 'filter_date':
ctx['date_from'] = data['form']['date_from']
ctx['date_to'] = data['form']['date_to']
ctx['state'] = data['form']['target_move']
self.context.update(ctx)
if (data['model'] == 'ir.ui.menu'):
new_ids = [data['form']['chart_account_id']]
objects = self.pool.get('account.account').browse(self.cr, self.uid, new_ids)
return super(general_ledger, self).set_context(objects, data, new_ids, report_type=report_type)
def __init__(self, cr, uid, name, context=None):
if context is None:
context = {}
super(general_ledger, self).__init__(cr, uid, name, context=context)
self.query = ""
self.tot_currency = 0.0
self.period_sql = ""
self.sold_accounts = {}
self.sortby = 'sort_date'
self.localcontext.update( {
'time': time,
'lines': self.lines,
'sum_debit_account': self._sum_debit_account,
'sum_credit_account': self._sum_credit_account,
'sum_balance_account': self._sum_balance_account,
'sum_currency_amount_account': self._sum_currency_amount_account,
'get_children_accounts': self.get_children_accounts,
'get_fiscalyear': self._get_fiscalyear,
'get_journal': self._get_journal,
'get_account': self._get_account,
'get_start_period': self.get_start_period,
'get_end_period': self.get_end_period,
'get_filter': self._get_filter,
'get_sortby': self._get_sortby,
'get_start_date':self._get_start_date,
'get_end_date':self._get_end_date,
'get_target_move': self._get_target_move,
})
self.context = context
def _sum_currency_amount_account(self, account):
self.cr.execute('SELECT sum(l.amount_currency) AS tot_currency \
FROM account_move_line l \
WHERE l.account_id = %s AND %s' %(account.id, self.query))
sum_currency = self.cr.fetchone()[0] or 0.0
if self.init_balance:
self.cr.execute('SELECT sum(l.amount_currency) AS tot_currency \
FROM account_move_line l \
WHERE l.account_id = %s AND %s '%(account.id, self.init_query))
sum_currency += self.cr.fetchone()[0] or 0.0
return sum_currency
def get_children_accounts(self, account):
res = []
currency_obj = self.pool.get('res.currency')
ids_acc = self.pool.get('account.account')._get_children_and_consol(self.cr, self.uid, account.id)
currency = account.currency_id and account.currency_id or account.company_id.currency_id
for child_account in self.pool.get('account.account').browse(self.cr, self.uid, ids_acc, context=self.context):
sql = """
SELECT count(id)
FROM account_move_line AS l
WHERE %s AND l.account_id = %%s
""" % (self.query)
self.cr.execute(sql, (child_account.id,))
num_entry = self.cr.fetchone()[0] or 0
sold_account = self._sum_balance_account(child_account)
self.sold_accounts[child_account.id] = sold_account
if self.display_account == 'movement':
if child_account.type != 'view' and num_entry <> 0:
res.append(child_account)
elif self.display_account == 'not_zero':
if child_account.type != 'view' and num_entry <> 0:
if not currency_obj.is_zero(self.cr, self.uid, currency, sold_account):
res.append(child_account)
else:
res.append(child_account)
if not res:
return [account]
return res
def lines(self, account):
""" Return all the account_move_line of account with their account code counterparts """
move_state = ['draft','posted']
if self.target_move == 'posted':
move_state = ['posted', '']
        # First compute all counterpart strings for every move_id where this account appears.
# Currently, the counterpart info is used only in landscape mode
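        # e.g. for a move that also touches accounts 401000 and 512000, the counterpart
        # string for this account's line is "401000, 512000" (codes are illustrative).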
sql = """
SELECT m1.move_id,
array_to_string(ARRAY(SELECT DISTINCT a.code
FROM account_move_line m2
LEFT JOIN account_account a ON (m2.account_id=a.id)
WHERE m2.move_id = m1.move_id
AND m2.account_id<>%%s), ', ') AS counterpart
FROM (SELECT move_id
FROM account_move_line l
LEFT JOIN account_move am ON (am.id = l.move_id)
WHERE am.state IN %s and %s AND l.account_id = %%s GROUP BY move_id) m1
"""% (tuple(move_state), self.query)
self.cr.execute(sql, (account.id, account.id))
counterpart_res = self.cr.dictfetchall()
counterpart_accounts = {}
for i in counterpart_res:
counterpart_accounts[i['move_id']] = i['counterpart']
del counterpart_res
# Then select all account_move_line of this account
if self.sortby == 'sort_journal_partner':
sql_sort='j.code, p.name, l.move_id'
else:
sql_sort='l.date, l.move_id'
sql = """
SELECT l.id AS lid, l.date AS ldate, j.code AS lcode, l.currency_id,l.amount_currency,l.ref AS lref, l.name AS lname, COALESCE(l.debit,0) AS debit, COALESCE(l.credit,0) AS credit, l.period_id AS lperiod_id, l.partner_id AS lpartner_id,
m.name AS move_name, m.id AS mmove_id,per.code as period_code,
c.symbol AS currency_code,
i.id AS invoice_id, i.type AS invoice_type, i.number AS invoice_number,
p.name AS partner_name
FROM account_move_line l
JOIN account_move m on (l.move_id=m.id)
LEFT JOIN res_currency c on (l.currency_id=c.id)
LEFT JOIN res_partner p on (l.partner_id=p.id)
LEFT JOIN account_invoice i on (m.id =i.move_id)
LEFT JOIN account_period per on (per.id=l.period_id)
JOIN account_journal j on (l.journal_id=j.id)
WHERE %s AND m.state IN %s AND l.account_id = %%s ORDER by %s
""" %(self.query, tuple(move_state), sql_sort)
self.cr.execute(sql, (account.id,))
res_lines = self.cr.dictfetchall()
res_init = []
if res_lines and self.init_balance:
            #FIXME: replace the hard-coded 'Initial Balance' label of lname with a translatable string
sql = """
SELECT 0 AS lid, '' AS ldate, '' AS lcode, COALESCE(SUM(l.amount_currency),0.0) AS amount_currency, '' AS lref, 'Initial Balance' AS lname, COALESCE(SUM(l.debit),0.0) AS debit, COALESCE(SUM(l.credit),0.0) AS credit, '' AS lperiod_id, '' AS lpartner_id,
'' AS move_name, '' AS mmove_id, '' AS period_code,
'' AS currency_code,
NULL AS currency_id,
'' AS invoice_id, '' AS invoice_type, '' AS invoice_number,
'' AS partner_name
FROM account_move_line l
LEFT JOIN account_move m on (l.move_id=m.id)
LEFT JOIN res_currency c on (l.currency_id=c.id)
LEFT JOIN res_partner p on (l.partner_id=p.id)
LEFT JOIN account_invoice i on (m.id =i.move_id)
JOIN account_journal j on (l.journal_id=j.id)
WHERE %s AND m.state IN %s AND l.account_id = %%s
""" %(self.init_query, tuple(move_state))
self.cr.execute(sql, (account.id,))
res_init = self.cr.dictfetchall()
res = res_init + res_lines
account_sum = 0.0
for l in res:
l['move'] = l['move_name'] != '/' and l['move_name'] or ('*'+str(l['mmove_id']))
l['partner'] = l['partner_name'] or ''
account_sum += l['debit'] - l['credit']
l['progress'] = account_sum
l['line_corresp'] = l['mmove_id'] == '' and ' ' or counterpart_accounts[l['mmove_id']].replace(', ',',')
            # Flip the sign of the currency amount on credit lines, then accumulate the currency total
            if l['credit'] > 0:
                if l['amount_currency'] is not None:
                    l['amount_currency'] = abs(l['amount_currency']) * -1
            if l['amount_currency'] is not None:
                self.tot_currency = self.tot_currency + l['amount_currency']
return res
def _sum_debit_account(self, account):
if account.type == 'view':
return account.debit
move_state = ['draft','posted']
if self.target_move == 'posted':
move_state = ['posted','']
self.cr.execute('SELECT sum(debit) \
FROM account_move_line l \
JOIN account_move am ON (am.id = l.move_id) \
WHERE (l.account_id = %s) \
AND (am.state IN %s) \
AND '+ self.query +' '
,(account.id, tuple(move_state)))
sum_debit = self.cr.fetchone()[0] or 0.0
if self.init_balance:
self.cr.execute('SELECT sum(debit) \
FROM account_move_line l \
JOIN account_move am ON (am.id = l.move_id) \
WHERE (l.account_id = %s) \
AND (am.state IN %s) \
AND '+ self.init_query +' '
,(account.id, tuple(move_state)))
# Add initial balance to the result
sum_debit += self.cr.fetchone()[0] or 0.0
return sum_debit
def _sum_credit_account(self, account):
if account.type == 'view':
return account.credit
move_state = ['draft','posted']
if self.target_move == 'posted':
move_state = ['posted','']
self.cr.execute('SELECT sum(credit) \
FROM account_move_line l \
JOIN account_move am ON (am.id = l.move_id) \
WHERE (l.account_id = %s) \
AND (am.state IN %s) \
AND '+ self.query +' '
,(account.id, tuple(move_state)))
sum_credit = self.cr.fetchone()[0] or 0.0
if self.init_balance:
self.cr.execute('SELECT sum(credit) \
FROM account_move_line l \
JOIN account_move am ON (am.id = l.move_id) \
WHERE (l.account_id = %s) \
AND (am.state IN %s) \
AND '+ self.init_query +' '
,(account.id, tuple(move_state)))
# Add initial balance to the result
sum_credit += self.cr.fetchone()[0] or 0.0
return sum_credit
def _sum_balance_account(self, account):
if account.type == 'view':
return account.balance
move_state = ['draft','posted']
if self.target_move == 'posted':
move_state = ['posted','']
self.cr.execute('SELECT (sum(debit) - sum(credit)) as tot_balance \
FROM account_move_line l \
JOIN account_move am ON (am.id = l.move_id) \
WHERE (l.account_id = %s) \
AND (am.state IN %s) \
AND '+ self.query +' '
,(account.id, tuple(move_state)))
sum_balance = self.cr.fetchone()[0] or 0.0
if self.init_balance:
self.cr.execute('SELECT (sum(debit) - sum(credit)) as tot_balance \
FROM account_move_line l \
JOIN account_move am ON (am.id = l.move_id) \
WHERE (l.account_id = %s) \
AND (am.state IN %s) \
AND '+ self.init_query +' '
,(account.id, tuple(move_state)))
# Add initial balance to the result
sum_balance += self.cr.fetchone()[0] or 0.0
return sum_balance
def _get_account(self, data):
if data['model'] == 'account.account':
return self.pool.get('account.account').browse(self.cr, self.uid, data['form']['id']).company_id.name
return super(general_ledger ,self)._get_account(data)
def _get_sortby(self, data):
if self.sortby == 'sort_date':
return self._translate('Date')
elif self.sortby == 'sort_journal_partner':
return self._translate('Journal & Partner')
return self._translate('Date')
class report_generalledger(osv.AbstractModel):
_name = 'report.account.report_generalledger'
_inherit = 'report.abstract_report'
_template = 'account.report_generalledger'
_wrapped_report_class = general_ledger
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
IssamLaradji/scikit-learn | sklearn/qda.py | 15 | 7139 | """
Quadratic Discriminant Analysis
"""
# Author: Matthieu Perrot <[email protected]>
#
# License: BSD 3 clause
import warnings
import numpy as np
from .base import BaseEstimator, ClassifierMixin
from .externals.six.moves import xrange
from .utils import check_array, check_X_y
__all__ = ['QDA']
class QDA(BaseEstimator, ClassifierMixin):
"""
Quadratic Discriminant Analysis (QDA)
A classifier with a quadratic decision boundary, generated
by fitting class conditional densities to the data
and using Bayes' rule.
The model fits a Gaussian density to each class.
Parameters
----------
priors : array, optional, shape = [n_classes]
Priors on classes
reg_param : float, optional
Regularizes the covariance estimate as
``(1-reg_param)*Sigma + reg_param*np.eye(n_features)``
Attributes
----------
covariances_ : list of array-like, shape = [n_features, n_features]
Covariance matrices of each class.
means_ : array-like, shape = [n_classes, n_features]
Class means.
priors_ : array-like, shape = [n_classes]
Class priors (sum to 1).
rotations_ : list of arrays
        For each class k an array of shape [n_features, n_k], with
        n_k = min(n_features, number of samples in class k), giving the
        rotation of the Gaussian distribution, i.e. its principal axes.
scalings_ : array-like, shape = [n_classes, n_features]
Contains the scaling of the Gaussian
distributions along the principal axes for each
class, i.e. the variance in the rotated coordinate system.
Examples
--------
>>> from sklearn.qda import QDA
>>> import numpy as np
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> y = np.array([1, 1, 1, 2, 2, 2])
>>> clf = QDA()
>>> clf.fit(X, y)
QDA(priors=None, reg_param=0.0)
>>> print(clf.predict([[-0.8, -1]]))
[1]
See also
--------
sklearn.lda.LDA: Linear discriminant analysis
"""
def __init__(self, priors=None, reg_param=0.):
self.priors = np.asarray(priors) if priors is not None else None
self.reg_param = reg_param
def fit(self, X, y, store_covariances=False, tol=1.0e-4):
"""
Fit the QDA model according to the given training data and parameters.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
            Training vector, where n_samples is the number of samples and
n_features is the number of features.
y : array, shape = [n_samples]
Target values (integers)
store_covariances : boolean
If True the covariance matrices are computed and stored in the
`self.covariances_` attribute.
"""
X, y = check_X_y(X, y)
self.classes_, y = np.unique(y, return_inverse=True)
n_samples, n_features = X.shape
n_classes = len(self.classes_)
if n_classes < 2:
raise ValueError('y has less than 2 classes')
if self.priors is None:
self.priors_ = np.bincount(y) / float(n_samples)
else:
self.priors_ = self.priors
cov = None
if store_covariances:
cov = []
means = []
scalings = []
rotations = []
for ind in xrange(n_classes):
Xg = X[y == ind, :]
meang = Xg.mean(0)
means.append(meang)
Xgc = Xg - meang
# Xgc = U * S * V.T
U, S, Vt = np.linalg.svd(Xgc, full_matrices=False)
rank = np.sum(S > tol)
if rank < n_features:
warnings.warn("Variables are collinear")
S2 = (S ** 2) / (len(Xg) - 1)
S2 = ((1 - self.reg_param) * S2) + self.reg_param
if store_covariances:
# cov = V * (S^2 / (n-1)) * V.T
cov.append(np.dot(S2 * Vt.T, Vt))
scalings.append(S2)
rotations.append(Vt.T)
if store_covariances:
self.covariances_ = cov
self.means_ = np.asarray(means)
self.scalings_ = np.asarray(scalings)
self.rotations_ = rotations
return self
def _decision_function(self, X):
X = check_array(X)
norm2 = []
for i in range(len(self.classes_)):
R = self.rotations_[i]
S = self.scalings_[i]
Xm = X - self.means_[i]
X2 = np.dot(Xm, R * (S ** (-0.5)))
norm2.append(np.sum(X2 ** 2, 1))
norm2 = np.array(norm2).T # shape = [len(X), n_classes]
return (-0.5 * (norm2 + np.sum(np.log(self.scalings_), 1))
+ np.log(self.priors_))
def decision_function(self, X):
"""Apply decision function to an array of samples.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Array of samples (test vectors).
Returns
-------
C : array, shape = [n_samples, n_classes] or [n_samples,]
Decision function values related to each class, per sample.
In the two-class case, the shape is [n_samples,], giving the
log likelihood ratio of the positive class.
"""
dec_func = self._decision_function(X)
# handle special case of two classes
if len(self.classes_) == 2:
return dec_func[:, 1] - dec_func[:, 0]
return dec_func
def predict(self, X):
"""Perform classification on an array of test vectors X.
The predicted class C for each sample in X is returned.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Returns
-------
C : array, shape = [n_samples]
"""
d = self._decision_function(X)
y_pred = self.classes_.take(d.argmax(1))
return y_pred
def predict_proba(self, X):
"""Return posterior probabilities of classification.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Array of samples/test vectors.
Returns
-------
C : array, shape = [n_samples, n_classes]
Posterior probabilities of classification per class.
"""
values = self._decision_function(X)
# compute the likelihood of the underlying gaussian models
# up to a multiplicative constant.
likelihood = np.exp(values - values.max(axis=1)[:, np.newaxis])
# compute posterior probabilities
return likelihood / likelihood.sum(axis=1)[:, np.newaxis]
def predict_log_proba(self, X):
"""Return posterior probabilities of classification.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Array of samples/test vectors.
Returns
-------
C : array, shape = [n_samples, n_classes]
Posterior log-probabilities of classification per class.
"""
# XXX : can do better to avoid precision overflows
probas_ = self.predict_proba(X)
return np.log(probas_)
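# Illustrative usage sketch: a minimal demo of decision_function / predict_proba
# on the toy data from the class docstring. The __main__ guard keeps it inert on
# import; values shown in the comments follow from the docstring example.
if __name__ == "__main__":
    X_demo = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
    y_demo = np.array([1, 1, 1, 2, 2, 2])
    clf_demo = QDA().fit(X_demo, y_demo, store_covariances=True)
    # Two-class case: decision_function returns the log likelihood ratio of the
    # positive class, with shape (n_samples,).
    print(clf_demo.decision_function(np.array([[-0.8, -1.0]])))
    # predict_proba rows sum to 1; predict picks the class with the largest value.
    print(clf_demo.predict_proba(np.array([[-0.8, -1.0]])))
    print(clf_demo.predict(np.array([[-0.8, -1.0]])))   # -> [1]
    # One covariance matrix per class is stored when store_covariances=True.
    print(len(clf_demo.covariances_))                    # -> 2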
| bsd-3-clause |
danielfreeman11/convex-nets | LaunchScripts/ConnectedCompsCalc.py | 1 | 22577 | import numpy as np
import sys
#Usage:
#python thisprog.py threshold numofnetworks
#Will randomly initialize numofnetworks neural networks and train them until the error on a training set is less than threshold
#Will then try to interpolate between these networks while keeping the error below that threshold.
#Will tabulate the number of connected components found in this way
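#Example invocation (values are illustrative): python ConnectedCompsCalc.py 50 4
#uses thresh = threshrange[50] = 0.065 and trains 4 networks.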
#Simple test problem: given three coefficients a,b,c in [-100,100], choose random x-values and evaluate
#the quadratic function a*x^2 + b*x + c at those values.
def func(x,a,b,c):
return x*x*a + x*b + c
def generatecandidate3(a,b,c):
candidate = [np.random.random() for x in xrange(1)]
candidatesolutions = [func(x,a,b,c) for x in candidate]
return candidate, candidatesolutions
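#e.g. generatecandidate3(.5, .25, .1) might return ([0.42], [0.2932]), since
#0.5*0.42**2 + 0.25*0.42 + 0.1 = 0.2932 (the single x-value is drawn uniformly from [0,1))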
import copy
alpha,hidden_dim,hidden_dim2 = (.001,4,4)
threshrange = np.linspace(.03,.1,101)
thresh = threshrange[int(sys.argv[1])%100]
synapses = []
#Testing starting in the same place
#synapse0 = 2*np.random.random((1,hidden_dim)) - 1
#synapse1 = 2*np.random.random((hidden_dim,hidden_dim2)) - 1
#synapse2 = 2*np.random.random((hidden_dim2,1)) - 1
for i in xrange(int(sys.argv[2])):
synapse_0 = 2*np.random.random((1,hidden_dim)) - 1
synapse_1 = 2*np.random.random((hidden_dim,hidden_dim2)) - 1
synapse_2 = 2*np.random.random((hidden_dim2,1)) - 1
#synapse_0 = copy.deepcopy(synapse0)
#synapse_1 = copy.deepcopy(synapse1)
#synapse_2 = copy.deepcopy(synapse2)
#remove the comment to get random initialization
stopcond = True
while stopcond:
#print 'epoch:' + str(e)
X = []
y = []
for i in xrange(10000):
a,b = generatecandidate3(.5,.25,.1)
X.append(a)
y.append(b)
X= np.array(X)
y=np.array(y)
j = 0
while stopcond:
#if j%5000 == 0: print j
layer_1 = 1/(1+np.exp(-(np.dot(X,synapse_0))))
#if(False):
# dropout_percent = .1
# layer_1 *= np.random.binomial([np.ones((len(X),hidden_dim))],1-dropout_percent)[0] * (1.0/(1-dropout_percent))
layer_2 = 1/(1+np.exp(-(np.dot(layer_1,synapse_1))))
#if(True):
# dropout_percent = .2
# layer_2 *= np.random.binomial([np.ones((len(layer_1),hidden_dim2))],1-dropout_percent)[0] * (1.0/(1-dropout_percent))
layer_3 = 1/(1+np.exp(-(np.dot(layer_2,synapse_2))))
#if(False):
# dropout_percent = .25
# layer_2 *= np.random.binomial([np.ones((len(layer_2),2))],1-dropout_percent)[0] * (1.0/(1-dropout_percent))
layer_3_delta = (layer_3- y)*(layer_3*(1-layer_3))
layer_2_delta = layer_3_delta.dot(synapse_2.T) * (layer_2 * (1-layer_2))
layer_1_delta = layer_2_delta.dot(synapse_1.T) * (layer_1 * (1-layer_1))
synapse_2 -= (alpha * layer_2.T.dot(layer_3_delta))
synapse_1 -= (alpha * layer_1.T.dot(layer_2_delta))
synapse_0 -= (alpha * X.T.dot(layer_1_delta))
# how much did we miss the target value?
layer_3_error = layer_3 - y
#if (j%5000) == 0:
# print "Error after "+str(j)+" iterations:" + str(np.mean(np.abs(layer_3_error)))
if np.mean(np.abs(layer_3_error)) < thresh:
#print "Changing stopcond!"
stopcond = False
j+=1
#remove the comment to get random initialization
synapses.append([synapse_0,synapse_1,synapse_2])
#Idea: Take two networks as input. Construct a string connecting the two networks with "beads" along the string.
#Stochastically (monte carlo? simulated annealing?) wiggle the beads until the max on the beads is minimized
from random import gauss
import copy
def make_rand_vector(dims):
vec = [gauss(0, 1) for i in range(dims)]
mag = sum(x**2 for x in vec) ** .5
return [x/mag for x in vec]
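#make_rand_vector(dims) returns a random direction: a list of length dims with unit L2 norm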
#Definition for test set:
'''X = []
y = []
for i in xrange(100):
j = i/100.
a,b = [[j],[func(j,.5,.25,.1)]]
X.append(a)
y.append(b)
X= np.array(X)
y=np.array(y)'''
#returns weights a fraction t of the way between synapse1 and synapse2 (t ranges from 0 to 1)
def synapse_interpolate(synapse1, synapse2, t):
return (synapse2-synapse1)*t + synapse1
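#t=0 returns synapse1, t=1 returns synapse2, t=0.5 the elementwise midpoint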
X = []
y = []
def GenTest(X, y):
X = []
y = []
for i in xrange(1000):
a,b = generatecandidate3(.5,.25,.1)
X.append(a)
y.append(b)
return np.array(X), np.array(y)
X, y = GenTest(X,y)
#Simple container to hold the weights defined on the beads
class WeightString:
def __init__(self, w1, w2, numbeads, threshold, springk):
self.w1 = w1
self.w2 = w2
self.beads = []
self.velocity = []
self.threshold = threshold
self.springk = springk
for n in xrange(numbeads):
beaddata = []
for k in xrange(len(self.w1)):
beaddata.append(synapse_interpolate(self.w1[k],self.w2[k], (n + 1.)/(numbeads+1.)))
self.beads.append(beaddata)
self.velocity = copy.deepcopy(self.beads)
for b in self.velocity:
for v in b:
v = 0.*v
#self.beads.reverse()
self.InitialEnergy = self.SpringEnergy()
self.AllBeads = copy.deepcopy(self.beads)
self.AllBeads.insert(0,self.w1)
self.AllBeads.append(self.w2)
self.ConvergedList = [False for f in xrange(len(self.AllBeads))]
self.ConvergedList[0] = True
self.ConvergedList[-1] = True
def SpringNorm(self, order):
total = 0.
#Energy between mobile beads
for i,b in enumerate(self.AllBeads):
if i < len(self.AllBeads)-1:
#print "Tallying energy between bead " + str(i) + " and bead " + str(i+1)
subtotal = 0.
for j in xrange(len(b)):
subtotal += np.linalg.norm(np.subtract(self.AllBeads[i][j],self.AllBeads[i+1][j]),ord=order)#/len(self.beads[0][j])
total+=subtotal
return total#/len(self.beads)
def SpringEnergy(self):
total = 0.
#Energy between the pinned, immobile weight and the first bead
subtotal = 0.
for j in xrange(len(self.beads[0])):
subtotal += np.linalg.norm(np.subtract(self.w1[j],self.beads[0][j]),ord=2)/len(self.beads[0][j])
total+=subtotal
#Energy between mobile beads
for i,b in enumerate(self.beads):
if i < len(self.beads)-1:
#print "Tallying energy between bead " + str(i) + " and bead " + str(i+1)
subtotal = 0.
for j in xrange(len(b)):
subtotal += np.linalg.norm(np.subtract(self.beads[i][j],self.beads[i+1][j]),ord=2)/len(self.beads[0][j])
total+=subtotal
#Energy between pinned, immobile final weights, and the last bead
subtotal = 0.
for j in xrange(len(self.beads[-1])):
subtotal += np.linalg.norm(np.subtract(self.w2[j],self.beads[-1][j]),ord=2)/len(self.beads[0][j])
total+=subtotal
return total/len(self.beads)
def SGDBead(self, bead, X, y):
layers = []
l1 = 1/(1+np.exp(-(np.dot(X,self.AllBeads[bead][0]))))
layers.append(l1)
for i,b in enumerate(self.AllBeads[bead][1:]):
l = 1/(1+np.exp(-(np.dot(layers[-1],b))))
layers.append(l)
layersdelta = []
l3 = (layers[-1] - y)*(layers[-1]*(1-layers[-1])) #+ (1./regparam)*OldSpringEnergy*np.ones(np.shape(y))
layersdelta.append(l3)
for i,l in enumerate(layers[:-1]):
ldelta = layersdelta[-1].dot(self.AllBeads[bead][-1-i].T) * (layers[:-1][-1-i]) * (1- (layers[:-1][-1-i]))
layersdelta.append(ldelta)
for i in xrange(len(layers)-1):
if -i-1 != 0:
self.AllBeads[bead][-i-1] -= .1*layers[-i-2].T.dot(layersdelta[i])
else:
self.AllBeads[bead][0] -= .1*X.T.dot(layersdelta[-1])
finalerror = (layers[-1] - y)
return np.mean(np.abs(finalerror))
#monte carlo update step
def UpdateBead(self, temperature, bead, X, y):
regparam = 100.
OldSpringEnergy = self.SpringEnergy()
OldMax = [EvalNet(b,X)-y for b in self.beads]
OldMaxError = max([np.mean(np.abs(om)) for om in OldMax])
oe = OldSpringEnergy/100000. + OldMaxError
#print "Old SE: " + str(OldSpringEnergy)
#print "Old Max: " + str(OldMax)
####print "Oldmaxerror: " + str(OldMaxError)
oldweight = copy.deepcopy(self.beads[bead])
layers = []
#print bead[0]
l1 = 1/(1+np.exp(-(np.dot(X,self.beads[bead][0]))))
layers.append(l1)
for i,b in enumerate(self.beads[bead][1:]):
l = 1/(1+np.exp(-(np.dot(layers[-1],b))))
layers.append(l)
#layer_3_delta = (layer_3- y)*(layer_3*(1-layer_3))
#layer_2_delta = layer_3_delta.dot(synapse_2.T) * (layer_2 * (1-layer_2))
#layer_1_delta = layer_2_delta.dot(synapse_1.T) * (layer_1 * (1-layer_1))
#layersdelta = []
layersdelta = []
l3 = (layers[-1] - y)*(layers[-1]*(1-layers[-1])) #+ (1./regparam)*OldSpringEnergy*np.ones(np.shape(y))
layersdelta.append(l3)
for i,l in enumerate(layers[:-1]):
ldelta = layersdelta[-1].dot(self.beads[bead][-1-i].T) * (layers[:-1][-1-i]) * (1- (layers[:-1][-1-i]))
layersdelta.append(ldelta)
for i in xrange(len(layers)-1):
#print i
#print self.beads[bead][-i-1]
#rint layers[-i-2].T
#print layersdelta[-i-1]
#print layers[-i-2].T.dot(layersdelta[-i-1])
if -i-1 != 0:
self.beads[bead][-i-1] -= .1*layers[-i-2].T.dot(layersdelta[i])
else:
self.beads[bead][0] -= .1*X.T.dot(layersdelta[-1])
#The code below regularizes the network so that they stay near each other in weight space
'''if bead == 0:
self.beads[bead][-i-1] -= (np.subtract(self.beads[bead][-i-1],self.w1[-i-1]) + np.subtract(self.beads[bead+1][-i-1],self.beads[bead][-i-1]))/regparam
if bead == len(self.beads)-1:
self.beads[bead][-i-1] -= (np.subtract(self.w2[-i-1],self.beads[bead][-i-1]) + np.subtract(self.beads[bead][-i-1],self.beads[bead-1][-i-1]))/regparam
if (bead > 0 and bead < len(self.beads)-1):
self.beads[bead][-i-1] -= (np.subtract(self.beads[bead+1][-i-1],self.beads[bead][-i-1]) + \
np.subtract(self.beads[bead][-i-1],self.beads[bead-1][-i-1]))/regparam'''
#layers.reverse()
# how much did we miss the target value?
NewSpringEnergy = self.SpringEnergy()
finalerror = (layers[-1] - y) #(1./regparam)*NewSpringEnergy*np.ones(np.shape(y))
NewMaxError = np.mean(np.abs(finalerror))
#print "New SE: " + str(NewSpringEnergy)
#print "Old Max: " + str(OldMax)
####print "Newmaxerror: " + str(NewMaxError)
ne = NewSpringEnergy/100000. + NewMaxError
#print "Newtotal: " + str(ne)
####print "\n"
myrand = np.random.rand()
####print "rand is: " + str(myrand) + " and boltzmann weight is " + str(np.exp(-(NewSpringEnergy - OldSpringEnergy)/temperature))
if NewSpringEnergy > OldSpringEnergy:
#if NewSpringEnergy > self.InitialEnergy:
if NewMaxError > OldMaxError:
self.beads[bead]=oldweight
else:
if myrand > np.exp(-(NewSpringEnergy - OldSpringEnergy)/temperature):
#if myrand > np.exp(-(NewSpringEnergy - self.InitialEnergy)/temperature):
#print "Rejecting proposal"
self.beads[bead]=oldweight
return True
    # TODO: make a pure kinetic evolver and swap between kinetic evolution and gradient descent
def UpdateKinetic(self, dt, k):
for bead in xrange(len(self.beads)):
for i in xrange(len(self.beads[bead])):
self.beads[bead][i] += dt*self.velocity[bead][i]
for bead in xrange(len(self.beads)):
for i in xrange(len(self.beads[bead])):
if bead == 0:
self.velocity[bead][i] += -dt*k*(np.subtract(self.beads[bead][i],self.w1[i]) + np.subtract(self.beads[bead+1][i],self.beads[bead][i]))
if bead == len(self.beads)-1:
self.velocity[bead][i] += -dt*k*(np.subtract(self.w2[i],self.beads[bead][i]) + np.subtract(self.beads[bead][i],self.beads[bead-1][i]))
if (bead > 0 and bead < len(self.beads)-1):
self.velocity[bead][i] += -dt*k*(np.subtract(self.beads[bead+1][i],self.beads[bead][i]) + \
np.subtract(self.beads[bead][i],self.beads[bead-1][i]))
#self.velocity[bead][i] -= .1*self.velocity[bead][i]
#monte carlo update step
def UpdateBeadPureKinetic(self, temperature, bead):
OldSpringEnergy = self.SpringEnergy()
#OldMax = [EvalNet(b,X)-y for b in self.beads]
#OldMaxError = max([np.mean(np.abs(om)) for om in OldMax])
#oe = OldSpringEnergy/100000. + OldMaxError
##print "Old SE: " + str(OldSpringEnergy)
#print "Old Max: " + str(OldMax)
#print "Oldmaxerror: " + str(OldMaxError)
#print "Oldtotal: " + str(oe)
oldweight = copy.deepcopy(self.beads[bead])
randupdates = []
for i,syn in enumerate(self.beads[bead]):
#create random perturbation to weight matrix with correct shape
addtobead = np.reshape(make_rand_vector(syn.size),syn.shape)
#add it to this particular bead
self.beads[bead][i]+=.1*addtobead
NewSpringEnergy = self.SpringEnergy()
#NewMax = [EvalNet(b,X)-y for b in self.beads]
#NewMaxError = max([np.mean(np.abs(om)) for om in OldMax])
##print "New SE: " + str(OldSpringEnergy)
#print "Old Max: " + str(OldMax)
#print "Newmaxerror: " + str(OldMaxError)
#ne = NewSpringEnergy/100000. + NewMaxError
#print "Newtotal: " + str(ne)
##print "\n"
#Gibbs sampling
#if OldSpringError/100. + OldMaxError < NewSpringError/100. + NewMaxError:
myrand = np.random.rand()
##print "rand is: " + str(myrand) + " and boltzmann weight is " + str(np.exp(-(NewSpringEnergy - OldSpringEnergy)/temperature))
if NewSpringEnergy > OldSpringEnergy:
if myrand > np.exp(-(NewSpringEnergy - OldSpringEnergy)/temperature):
##print "Rejecting proposal"
self.beads[bead]=oldweight
return True
test = WeightString(synapses[0],synapses[1],5,1,1)
#Simple function to evaluate network
def EvalNet(net, X):
layer_1 = 1/(1+np.exp(-(np.dot(X,net[0]))))
layer_2 = 1/(1+np.exp(-(np.dot(layer_1,net[1]))))
layer_3 = 1/(1+np.exp(-(np.dot(layer_2,net[2]))))
# how much did we miss the target value?
#layer_3_error = layer_3 - y
return layer_3
def BeadError(X, y, bead):
X= np.array(X)
y=np.array(y)
layer_1 = 1/(1+np.exp(-(np.dot(X,bead[0]))))
layer_2 = 1/(1+np.exp(-(np.dot(layer_1,bead[1]))))
layer_3 = 1/(1+np.exp(-(np.dot(layer_2,bead[2]))))
# how much did we miss the target value?
layer_3_error = layer_3 - y
return np.mean(np.abs(layer_3_error))
def InterpBeadError(X, y, bead1, bead2, write = False, name = "00"):
'''X = []
y = []
for i in xrange(1000):
a,b = generatecandidate3(.5,.25,.1)
X.append(a)
y.append(b)'''
X= np.array(X)
y=np.array(y)
errors = []
for tt in xrange(100):
#Should make this architecture independent at some point
t = tt/100.
layer_1 = 1/(1+np.exp(-(np.dot(X,synapse_interpolate(bead1[0],bead2[0],t)))))
layer_2 = 1/(1+np.exp(-(np.dot(layer_1,synapse_interpolate(bead1[1],bead2[1],t)))))
layer_3 = 1/(1+np.exp(-(np.dot(layer_2,synapse_interpolate(bead1[2],bead2[2],t)))))
# how much did we miss the target value?
layer_3_error = layer_3 - y
errors.append(np.mean(np.abs(layer_3_error)))
if write == True:
with open("f" + str(name) + ".out",'w+') as f:
for e in errors:
f.write(str(e) + "\n")
return max(errors)
results = []
connecteddict = {}
for i1 in xrange(len(synapses)):
connecteddict[i1] = 'not connected'
for i1 in xrange(len(synapses)):
#print i1
for i2 in xrange(len(synapses)):
if i2 > i1 and ((connecteddict[i1] != connecteddict[i2]) or (connecteddict[i1] == 'not connected' or connecteddict[i2] == 'not connected')) :
test = WeightString(synapses[i1],synapses[i2],1,1,1)
training_threshold = thresh
depth = 0
d_max = 10
#Check error between beads
#Alg: for each bead at depth i, SGD until converged.
#For beads with max error along path too large, add another bead between them, repeat
while (depth < d_max):
X, y = GenTest(X,y)
counter = 0
for i,c in enumerate(test.ConvergedList):
if c == False:
error = BeadError(X, y, test.AllBeads[i])
#print error
while error > .5 * training_threshold and counter < 40000:
counter += 1
error = test.SGDBead(i, X, y)
#if counter%5000==0:
# print counter
# print error
test.ConvergedList[i] = True
#print test.ConvergedList
interperrors = []
for b in xrange(len(test.AllBeads)-1):
e = InterpBeadError(X,y,test.AllBeads[b],test.AllBeads[b+1])
interperrors.append(e)
#print interperrors
if max(interperrors) < training_threshold:
depth = 2*d_max
#print test.ConvergedList
#print test.SpringNorm(2)
#print "Done!"
else:
#Interperrors stores the maximum error on the path between beads
#shift index to account for added beads
shift = 0
for i, ie in enumerate(interperrors):
if ie > training_threshold:
beaddata = []
for k in xrange(len(test.w1)):
beaddata.append(synapse_interpolate(test.AllBeads[i+shift][k],test.AllBeads[i+shift+1][k], .5))
test.AllBeads.insert(i+shift+1,beaddata)
test.ConvergedList.insert(i+shift+1, False)
shift+=1
#print test.ConvergedList
#print test.SpringNorm(2)
#print d_max
depth += 1
if depth == 2*d_max:
results.append([i1,i2,test.SpringNorm(2),"Connected"])
if connecteddict[i1] == 'not connected' and connecteddict[i2] == 'not connected':
connecteddict[i1] = i1
connecteddict[i2] = i1
if connecteddict[i1] == 'not connected':
connecteddict[i1] = connecteddict[i2]
else:
if connecteddict[i2] == 'not connected':
connecteddict[i2] = connecteddict[i1]
else:
if connecteddict[i1] != 'not connected' and connecteddict[i2] != 'not connected':
hold = connecteddict[i2]
connecteddict[i2] = connecteddict[i1]
for h in xrange(len(synapses)):
if connecteddict[h] == hold:
connecteddict[h] = connecteddict[i1]
else:
results.append([i1,i2,test.SpringNorm(2),"Disconnected"])
#print results[-1]
uniquecomps = []
totalcomps = 0
for i in xrange(len(synapses)):
if not (connecteddict[i] in uniquecomps):
uniquecomps.append(connecteddict[i])
if connecteddict[i] == 'not connected':
totalcomps += 1
#print i,connecteddict[i]
notconoffset = 0
if 'not connected' in uniquecomps:
notconoffset = -1
print "Thresh: " + str(thresh)
print "Comps: " + str(len(uniquecomps) + notconoffset + totalcomps)
#for i in xrange(len(synapses)):
# print connecteddict[i]
connsum = []
for r in results:
if r[3] == "Connected":
connsum.append(r[2])
#print r[2]
print "***"
print np.average(connsum)
print np.std(connsum)
| mit |
zsiciarz/variablestars.net | observations/utils.py | 1 | 1279 | import time
def jd_now():
"""
Returns Julian Date at the current moment.
"""
return 2440587.5 + time.time() / 86400.0
def normalize_star_name(name):
"""
Normalize star name with GCVS names, for example: V339 -> V0339.
"""
digits = "123456789"
if name[0] == "V" and name[1] in digits and name[4] not in digits:
name = "V0" + name[1:]
return name
def dict_to_observation(row, observer):
from .models import Observation
from stars.models import Star
name = normalize_star_name(row["name"])
star = Star.objects.get(name=name)
fainter_than = "<" in row["magnitude"]
magnitude = float(row["magnitude"].replace("<", ""))
jd = float(row["date"])
# TODO: use get_or_create with defaults
try:
observation = Observation.objects.get(observer=observer, star=star, jd=jd,)
except Observation.DoesNotExist:
observation = Observation(observer=observer, star=star, jd=jd,)
observation.magnitude = magnitude
observation.fainter_than = fainter_than
observation.comp1 = row["comp1"]
observation.comp2 = row.get("comp2", "")
observation.chart = row["chart"]
observation.comment_code = row["comment_code"]
observation.notes = row["notes"]
return observation
| mit |
jswope00/griffinx | common/djangoapps/student/roles.py | 15 | 11247 | """
Classes used to model the roles used in the courseware. Each role is responsible for checking membership,
adding users, removing users, and listing members
"""
from abc import ABCMeta, abstractmethod
from django.contrib.auth.models import User
import logging
from student.models import CourseAccessRole
from xmodule_django.models import CourseKeyField
log = logging.getLogger(__name__)
# A list of registered access roles.
REGISTERED_ACCESS_ROLES = {}
def register_access_role(cls):
"""
Decorator that allows access roles to be registered within the roles module and referenced by their
string values.
Assumes that the decorated class has a "ROLE" attribute, defining its type.
"""
try:
role_name = getattr(cls, 'ROLE')
REGISTERED_ACCESS_ROLES[role_name] = cls
except AttributeError:
log.exception(u"Unable to register Access Role with attribute 'ROLE'.")
return cls
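# Typical usage, mirroring the concrete roles defined further below:
#
#     @register_access_role
#     class CourseStaffRole(CourseRole):
#         ROLE = 'staff'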
class RoleCache(object):
"""
A cache of the CourseAccessRoles held by a particular user
"""
def __init__(self, user):
self._roles = set(
CourseAccessRole.objects.filter(user=user).all()
)
def has_role(self, role, course_id, org):
"""
Return whether this RoleCache contains a role with the specified role, course_id, and org
"""
return any(
access_role.role == role and
access_role.course_id == course_id and
access_role.org == org
for access_role in self._roles
)
class AccessRole(object):
"""
Object representing a role with particular access to a resource
"""
__metaclass__ = ABCMeta
@abstractmethod
def has_user(self, user): # pylint: disable=unused-argument
"""
Return whether the supplied django user has access to this role.
"""
return False
@abstractmethod
def add_users(self, *users):
"""
Add the role to the supplied django users.
"""
pass
@abstractmethod
def remove_users(self, *users):
"""
Remove the role from the supplied django users.
"""
pass
@abstractmethod
def users_with_role(self):
"""
Return a django QuerySet for all of the users with this role
"""
return User.objects.none()
class GlobalStaff(AccessRole):
"""
The global staff role
"""
def has_user(self, user):
return user.is_staff
def add_users(self, *users):
for user in users:
if (user.is_authenticated() and user.is_active):
user.is_staff = True
user.save()
def remove_users(self, *users):
for user in users:
# don't check is_authenticated nor is_active on purpose
user.is_staff = False
user.save()
def users_with_role(self):
raise Exception("This operation is un-indexed, and shouldn't be used")
class RoleBase(AccessRole):
"""
Roles by type (e.g., instructor, beta_user) and optionally org, course_key
"""
def __init__(self, role_name, org='', course_key=None):
"""
Create role from required role_name w/ optional org and course_key. You may just provide a role
name if it's a global role (not constrained to an org or course). Provide org if constrained to
an org. Provide org and course if constrained to a course. Although, you should use the subclasses
for all of these.
"""
super(RoleBase, self).__init__()
self.org = org
self.course_key = course_key
self._role_name = role_name
def has_user(self, user):
"""
Return whether the supplied django user has access to this role.
"""
if not (user.is_authenticated() and user.is_active):
return False
# pylint: disable=protected-access
if not hasattr(user, '_roles'):
# Cache a list of tuples identifying the particular roles that a user has
# Stored as tuples, rather than django models, to make it cheaper to construct objects for comparison
user._roles = RoleCache(user)
return user._roles.has_role(self._role_name, self.course_key, self.org)
def add_users(self, *users):
"""
Add the supplied django users to this role.
"""
# silently ignores anonymous and inactive users so that any that are
# legit get updated.
from student.models import CourseAccessRole
for user in users:
            if user.is_authenticated() and user.is_active and not self.has_user(user):
entry = CourseAccessRole(user=user, role=self._role_name, course_id=self.course_key, org=self.org)
entry.save()
if hasattr(user, '_roles'):
del user._roles
def remove_users(self, *users):
"""
Remove the supplied django users from this role.
"""
entries = CourseAccessRole.objects.filter(
user__in=users, role=self._role_name, org=self.org, course_id=self.course_key
)
entries.delete()
for user in users:
if hasattr(user, '_roles'):
del user._roles
def users_with_role(self):
"""
Return a django QuerySet for all of the users with this role
"""
# Org roles don't query by CourseKey, so use CourseKeyField.Empty for that query
if self.course_key is None:
self.course_key = CourseKeyField.Empty
entries = User.objects.filter(
courseaccessrole__role=self._role_name,
courseaccessrole__org=self.org,
courseaccessrole__course_id=self.course_key
)
return entries
class CourseRole(RoleBase):
"""
A named role in a particular course
"""
def __init__(self, role, course_key):
"""
Args:
course_key (CourseKey)
"""
super(CourseRole, self).__init__(role, course_key.org, course_key)
@classmethod
    def course_group_already_exists(cls, course_key):
return CourseAccessRole.objects.filter(org=course_key.org, course_id=course_key).exists()
class OrgRole(RoleBase):
"""
A named role in a particular org independent of course
"""
def __init__(self, role, org):
super(OrgRole, self).__init__(role, org)
@register_access_role
class CourseStaffRole(CourseRole):
"""A Staff member of a course"""
ROLE = 'staff'
def __init__(self, *args, **kwargs):
super(CourseStaffRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseInstructorRole(CourseRole):
"""A course Instructor"""
ROLE = 'instructor'
def __init__(self, *args, **kwargs):
super(CourseInstructorRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseFinanceAdminRole(CourseRole):
"""A course staff member with privileges to review financial data."""
ROLE = 'finance_admin'
def __init__(self, *args, **kwargs):
super(CourseFinanceAdminRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseSalesAdminRole(CourseRole):
"""A course staff member with privileges to perform sales operations. """
ROLE = 'sales_admin'
def __init__(self, *args, **kwargs):
super(CourseSalesAdminRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseBetaTesterRole(CourseRole):
"""A course Beta Tester"""
ROLE = 'beta_testers'
def __init__(self, *args, **kwargs):
super(CourseBetaTesterRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class LibraryUserRole(CourseRole):
"""
A user who can view a library and import content from it, but not edit it.
Used in Studio only.
"""
ROLE = 'library_user'
def __init__(self, *args, **kwargs):
super(LibraryUserRole, self).__init__(self.ROLE, *args, **kwargs)
class OrgStaffRole(OrgRole):
"""An organization staff member"""
def __init__(self, *args, **kwargs):
super(OrgStaffRole, self).__init__('staff', *args, **kwargs)
class OrgInstructorRole(OrgRole):
"""An organization instructor"""
def __init__(self, *args, **kwargs):
super(OrgInstructorRole, self).__init__('instructor', *args, **kwargs)
class OrgLibraryUserRole(OrgRole):
"""
A user who can view any libraries in an org and import content from them, but not edit them.
Used in Studio only.
"""
ROLE = LibraryUserRole.ROLE
def __init__(self, *args, **kwargs):
super(OrgLibraryUserRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseCreatorRole(RoleBase):
"""
This is the group of people who have permission to create new courses (we may want to eventually
make this an org based role).
"""
ROLE = "course_creator_group"
def __init__(self, *args, **kwargs):
super(CourseCreatorRole, self).__init__(self.ROLE, *args, **kwargs)
class UserBasedRole(object):
"""
Backward mapping: given a user, manipulate the courses and roles
"""
def __init__(self, user, role):
"""
Create a UserBasedRole accessor: for a given user and role (e.g., "instructor")
"""
self.user = user
self.role = role
def has_course(self, course_key):
"""
Return whether the role's user has the configured role access to the passed course
"""
if not (self.user.is_authenticated() and self.user.is_active):
return False
# pylint: disable=protected-access
if not hasattr(self.user, '_roles'):
self.user._roles = RoleCache(self.user)
return self.user._roles.has_role(self.role, course_key, course_key.org)
def add_course(self, *course_keys):
"""
Grant this object's user the object's role for the supplied courses
"""
        if self.user.is_authenticated() and self.user.is_active:
for course_key in course_keys:
entry = CourseAccessRole(user=self.user, role=self.role, course_id=course_key, org=course_key.org)
entry.save()
if hasattr(self.user, '_roles'):
del self.user._roles
else:
raise ValueError("user is not active. Cannot grant access to courses")
def remove_courses(self, *course_keys):
"""
Remove the supplied courses from this user's configured role.
"""
entries = CourseAccessRole.objects.filter(user=self.user, role=self.role, course_id__in=course_keys)
entries.delete()
if hasattr(self.user, '_roles'):
del self.user._roles
def courses_with_role(self):
"""
Return a django QuerySet for all of the courses with this user x role. You can access
any of these properties on each result record:
* user (will be self.user--thus uninteresting)
* org
* course_id
* role (will be self.role--thus uninteresting)
"""
return CourseAccessRole.objects.filter(role=self.role, user=self.user)
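# Illustrative (hypothetical) use of the backward mapping, assuming `user` and
# `course_key` objects are available:
#
#     staff_courses = UserBasedRole(user, CourseStaffRole.ROLE).courses_with_role()
#     UserBasedRole(user, CourseStaffRole.ROLE).add_course(course_key)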
| agpl-3.0 |
bdupharm/sqlalchemy | lib/sqlalchemy/orm/util.py | 2 | 38404 | # orm/util.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from .. import sql, util, event, exc as sa_exc, inspection
from ..sql import expression, util as sql_util, operators
from .interfaces import PropComparator, MapperProperty
from . import attributes
import re
from .base import instance_str, state_str, state_class_str, attribute_str, \
state_attribute_str, object_mapper, object_state, _none_set, _never_set
from .base import class_mapper, _class_to_mapper
from .base import InspectionAttr
from .path_registry import PathRegistry
all_cascades = frozenset(("delete", "delete-orphan", "all", "merge",
"expunge", "save-update", "refresh-expire",
"none"))
class CascadeOptions(frozenset):
"""Keeps track of the options sent to relationship().cascade"""
_add_w_all_cascades = all_cascades.difference([
'all', 'none', 'delete-orphan'])
_allowed_cascades = all_cascades
__slots__ = (
'save_update', 'delete', 'refresh_expire', 'merge',
'expunge', 'delete_orphan')
def __new__(cls, value_list):
if isinstance(value_list, util.string_types) or value_list is None:
return cls.from_string(value_list)
values = set(value_list)
if values.difference(cls._allowed_cascades):
raise sa_exc.ArgumentError(
"Invalid cascade option(s): %s" %
", ".join([repr(x) for x in
sorted(values.difference(cls._allowed_cascades))]))
if "all" in values:
values.update(cls._add_w_all_cascades)
if "none" in values:
values.clear()
values.discard('all')
self = frozenset.__new__(CascadeOptions, values)
self.save_update = 'save-update' in values
self.delete = 'delete' in values
self.refresh_expire = 'refresh-expire' in values
self.merge = 'merge' in values
self.expunge = 'expunge' in values
self.delete_orphan = "delete-orphan" in values
if self.delete_orphan and not self.delete:
util.warn("The 'delete-orphan' cascade "
"option requires 'delete'.")
return self
def __repr__(self):
return "CascadeOptions(%r)" % (
",".join([x for x in sorted(self)])
)
@classmethod
def from_string(cls, arg):
values = [
c for c
in re.split('\s*,\s*', arg or "")
if c
]
return cls(values)
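# For example, CascadeOptions.from_string("save-update, merge") yields an options
# set with the save_update and merge flags on; "all" turns on every flag except
# delete_orphan, and "none" clears the selection entirely.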
def _validator_events(
desc, key, validator, include_removes, include_backrefs):
"""Runs a validation method on an attribute value to be set or
appended.
"""
if not include_backrefs:
def detect_is_backref(state, initiator):
impl = state.manager[key].impl
return initiator.impl is not impl
if include_removes:
def append(state, value, initiator):
if include_backrefs or not detect_is_backref(state, initiator):
return validator(state.obj(), key, value, False)
else:
return value
def set_(state, value, oldvalue, initiator):
if include_backrefs or not detect_is_backref(state, initiator):
return validator(state.obj(), key, value, False)
else:
return value
def remove(state, value, initiator):
if include_backrefs or not detect_is_backref(state, initiator):
validator(state.obj(), key, value, True)
else:
def append(state, value, initiator):
if include_backrefs or not detect_is_backref(state, initiator):
return validator(state.obj(), key, value)
else:
return value
def set_(state, value, oldvalue, initiator):
if include_backrefs or not detect_is_backref(state, initiator):
return validator(state.obj(), key, value)
else:
return value
event.listen(desc, 'append', append, raw=True, retval=True)
event.listen(desc, 'set', set_, raw=True, retval=True)
if include_removes:
event.listen(desc, "remove", remove, raw=True, retval=True)
def polymorphic_union(table_map, typecolname,
aliasname='p_union', cast_nulls=True):
"""Create a ``UNION`` statement used by a polymorphic mapper.
See :ref:`concrete_inheritance` for an example of how
this is used.
:param table_map: mapping of polymorphic identities to
:class:`.Table` objects.
:param typecolname: string name of a "discriminator" column, which will be
derived from the query, producing the polymorphic identity for
each row. If ``None``, no polymorphic discriminator is generated.
:param aliasname: name of the :func:`~sqlalchemy.sql.expression.alias()`
construct generated.
:param cast_nulls: if True, non-existent columns, which are represented
as labeled NULLs, will be passed into CAST. This is a legacy behavior
that is problematic on some backends such as Oracle - in which case it
can be set to False.
"""
colnames = util.OrderedSet()
colnamemaps = {}
types = {}
for key in table_map:
table = table_map[key]
# mysql doesn't like selecting from a select;
# make it an alias of the select
if isinstance(table, sql.Select):
table = table.alias()
table_map[key] = table
m = {}
for c in table.c:
colnames.add(c.key)
m[c.key] = c
types[c.key] = c.type
colnamemaps[table] = m
def col(name, table):
try:
return colnamemaps[table][name]
except KeyError:
if cast_nulls:
return sql.cast(sql.null(), types[name]).label(name)
else:
return sql.type_coerce(sql.null(), types[name]).label(name)
result = []
for type, table in table_map.items():
if typecolname is not None:
result.append(
sql.select([col(name, table) for name in colnames] +
[sql.literal_column(
sql_util._quote_ddl_expr(type)).
label(typecolname)],
from_obj=[table]))
else:
result.append(sql.select([col(name, table) for name in colnames],
from_obj=[table]))
return sql.union_all(*result).alias(aliasname)
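# Illustrative call (hypothetical concrete-inheritance tables; see the
# concrete_inheritance section referenced in the docstring):
#
#     pjoin = polymorphic_union(
#         {'manager': managers_table, 'engineer': engineers_table},
#         'type', 'pjoin')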
def identity_key(*args, **kwargs):
"""Generate "identity key" tuples, as are used as keys in the
:attr:`.Session.identity_map` dictionary.
This function has several call styles:
* ``identity_key(class, ident)``
This form receives a mapped class and a primary key scalar or
tuple as an argument.
E.g.::
>>> identity_key(MyClass, (1, 2))
(<class '__main__.MyClass'>, (1, 2))
:param class: mapped class (must be a positional argument)
:param ident: primary key, may be a scalar or tuple argument.
* ``identity_key(instance=instance)``
This form will produce the identity key for a given instance. The
instance need not be persistent, only that its primary key attributes
are populated (else the key will contain ``None`` for those missing
values).
E.g.::
>>> instance = MyClass(1, 2)
>>> identity_key(instance=instance)
(<class '__main__.MyClass'>, (1, 2))
In this form, the given instance is ultimately run though
:meth:`.Mapper.identity_key_from_instance`, which will have the
effect of performing a database check for the corresponding row
if the object is expired.
:param instance: object instance (must be given as a keyword arg)
* ``identity_key(class, row=row)``
This form is similar to the class/tuple form, except is passed a
database result row as a :class:`.RowProxy` object.
E.g.::
>>> row = engine.execute("select * from table where a=1 and b=2").\
first()
>>> identity_key(MyClass, row=row)
(<class '__main__.MyClass'>, (1, 2))
:param class: mapped class (must be a positional argument)
:param row: :class:`.RowProxy` row returned by a :class:`.ResultProxy`
(must be given as a keyword arg)
"""
if args:
if len(args) == 1:
class_ = args[0]
try:
row = kwargs.pop("row")
except KeyError:
ident = kwargs.pop("ident")
elif len(args) == 2:
class_, ident = args
elif len(args) == 3:
class_, ident = args
else:
raise sa_exc.ArgumentError(
"expected up to three positional arguments, "
"got %s" % len(args))
if kwargs:
raise sa_exc.ArgumentError("unknown keyword arguments: %s"
% ", ".join(kwargs))
mapper = class_mapper(class_)
if "ident" in locals():
return mapper.identity_key_from_primary_key(util.to_list(ident))
return mapper.identity_key_from_row(row)
instance = kwargs.pop("instance")
if kwargs:
raise sa_exc.ArgumentError("unknown keyword arguments: %s"
% ", ".join(kwargs.keys))
mapper = object_mapper(instance)
return mapper.identity_key_from_instance(instance)
class ORMAdapter(sql_util.ColumnAdapter):
"""ColumnAdapter subclass which excludes adaptation of entities from
non-matching mappers.
"""
def __init__(self, entity, equivalents=None, adapt_required=False,
chain_to=None, allow_label_resolve=True,
anonymize_labels=False):
info = inspection.inspect(entity)
self.mapper = info.mapper
selectable = info.selectable
is_aliased_class = info.is_aliased_class
if is_aliased_class:
self.aliased_class = entity
else:
self.aliased_class = None
sql_util.ColumnAdapter.__init__(
self, selectable, equivalents, chain_to,
adapt_required=adapt_required,
allow_label_resolve=allow_label_resolve,
anonymize_labels=anonymize_labels,
include_fn=self._include_fn
)
def _include_fn(self, elem):
entity = elem._annotations.get('parentmapper', None)
return not entity or entity.isa(self.mapper)
class AliasedClass(object):
"""Represents an "aliased" form of a mapped class for usage with Query.
The ORM equivalent of a :func:`sqlalchemy.sql.expression.alias`
construct, this object mimics the mapped class using a
__getattr__ scheme and maintains a reference to a
real :class:`~sqlalchemy.sql.expression.Alias` object.
Usage is via the :func:`.orm.aliased` function, or alternatively
via the :func:`.orm.with_polymorphic` function.
Usage example::
# find all pairs of users with the same name
user_alias = aliased(User)
session.query(User, user_alias).\\
join((user_alias, User.id > user_alias.id)).\\
filter(User.name==user_alias.name)
The resulting object is an instance of :class:`.AliasedClass`.
This object implements an attribute scheme which produces the
same attribute and method interface as the original mapped
class, allowing :class:`.AliasedClass` to be compatible
with any attribute technique which works on the original class,
including hybrid attributes (see :ref:`hybrids_toplevel`).
The :class:`.AliasedClass` can be inspected for its underlying
:class:`.Mapper`, aliased selectable, and other information
using :func:`.inspect`::
from sqlalchemy import inspect
my_alias = aliased(MyClass)
insp = inspect(my_alias)
The resulting inspection object is an instance of :class:`.AliasedInsp`.
See :func:`.aliased` and :func:`.with_polymorphic` for construction
argument descriptions.
"""
def __init__(self, cls, alias=None,
name=None,
flat=False,
adapt_on_names=False,
# TODO: None for default here?
with_polymorphic_mappers=(),
with_polymorphic_discriminator=None,
base_alias=None,
use_mapper_path=False):
mapper = _class_to_mapper(cls)
if alias is None:
alias = mapper._with_polymorphic_selectable.alias(
name=name, flat=flat)
self._aliased_insp = AliasedInsp(
self,
mapper,
alias,
name,
with_polymorphic_mappers
if with_polymorphic_mappers
else mapper.with_polymorphic_mappers,
with_polymorphic_discriminator
if with_polymorphic_discriminator is not None
else mapper.polymorphic_on,
base_alias,
use_mapper_path,
adapt_on_names
)
self.__name__ = 'AliasedClass_%s' % mapper.class_.__name__
def __getattr__(self, key):
try:
_aliased_insp = self.__dict__['_aliased_insp']
except KeyError:
raise AttributeError()
else:
for base in _aliased_insp._target.__mro__:
try:
attr = object.__getattribute__(base, key)
except AttributeError:
continue
else:
break
else:
raise AttributeError(key)
if isinstance(attr, PropComparator):
ret = attr.adapt_to_entity(_aliased_insp)
setattr(self, key, ret)
return ret
elif hasattr(attr, 'func_code'):
is_method = getattr(_aliased_insp._target, key, None)
if is_method and is_method.__self__ is not None:
return util.types.MethodType(attr.__func__, self, self)
else:
return None
elif hasattr(attr, '__get__'):
ret = attr.__get__(None, self)
if isinstance(ret, PropComparator):
return ret.adapt_to_entity(_aliased_insp)
else:
return ret
else:
return attr
def __repr__(self):
return '<AliasedClass at 0x%x; %s>' % (
id(self), self._aliased_insp._target.__name__)
class AliasedInsp(InspectionAttr):
"""Provide an inspection interface for an
:class:`.AliasedClass` object.
The :class:`.AliasedInsp` object is returned
given an :class:`.AliasedClass` using the
:func:`.inspect` function::
from sqlalchemy import inspect
from sqlalchemy.orm import aliased
my_alias = aliased(MyMappedClass)
insp = inspect(my_alias)
Attributes on :class:`.AliasedInsp`
include:
* ``entity`` - the :class:`.AliasedClass` represented.
* ``mapper`` - the :class:`.Mapper` mapping the underlying class.
* ``selectable`` - the :class:`.Alias` construct which ultimately
represents an aliased :class:`.Table` or :class:`.Select`
construct.
* ``name`` - the name of the alias. Also is used as the attribute
name when returned in a result tuple from :class:`.Query`.
* ``with_polymorphic_mappers`` - collection of :class:`.Mapper` objects
indicating all those mappers expressed in the select construct
for the :class:`.AliasedClass`.
* ``polymorphic_on`` - an alternate column or SQL expression which
will be used as the "discriminator" for a polymorphic load.
.. seealso::
:ref:`inspection_toplevel`
"""
def __init__(self, entity, mapper, selectable, name,
with_polymorphic_mappers, polymorphic_on,
_base_alias, _use_mapper_path, adapt_on_names):
self.entity = entity
self.mapper = mapper
self.selectable = selectable
self.name = name
self.with_polymorphic_mappers = with_polymorphic_mappers
self.polymorphic_on = polymorphic_on
self._base_alias = _base_alias or self
self._use_mapper_path = _use_mapper_path
self._adapter = sql_util.ColumnAdapter(
selectable, equivalents=mapper._equivalent_columns,
adapt_on_names=adapt_on_names, anonymize_labels=True)
self._adapt_on_names = adapt_on_names
self._target = mapper.class_
for poly in self.with_polymorphic_mappers:
if poly is not mapper:
setattr(self.entity, poly.class_.__name__,
AliasedClass(poly.class_, selectable, base_alias=self,
adapt_on_names=adapt_on_names,
use_mapper_path=_use_mapper_path))
is_aliased_class = True
"always returns True"
@property
def class_(self):
"""Return the mapped class ultimately represented by this
:class:`.AliasedInsp`."""
return self.mapper.class_
@util.memoized_property
def _path_registry(self):
if self._use_mapper_path:
return self.mapper._path_registry
else:
return PathRegistry.per_mapper(self)
def __getstate__(self):
return {
'entity': self.entity,
'mapper': self.mapper,
'alias': self.selectable,
'name': self.name,
'adapt_on_names': self._adapt_on_names,
'with_polymorphic_mappers':
self.with_polymorphic_mappers,
'with_polymorphic_discriminator':
self.polymorphic_on,
'base_alias': self._base_alias,
'use_mapper_path': self._use_mapper_path
}
def __setstate__(self, state):
self.__init__(
state['entity'],
state['mapper'],
state['alias'],
state['name'],
state['with_polymorphic_mappers'],
state['with_polymorphic_discriminator'],
state['base_alias'],
state['use_mapper_path'],
state['adapt_on_names']
)
def _adapt_element(self, elem):
return self._adapter.traverse(elem).\
_annotate({
'parententity': self,
'parentmapper': self.mapper}
)
def _entity_for_mapper(self, mapper):
self_poly = self.with_polymorphic_mappers
if mapper in self_poly:
if mapper is self.mapper:
return self
else:
return getattr(
self.entity, mapper.class_.__name__)._aliased_insp
elif mapper.isa(self.mapper):
return self
else:
assert False, "mapper %s doesn't correspond to %s" % (
mapper, self)
def __repr__(self):
if self.with_polymorphic_mappers:
with_poly = "(%s)" % ", ".join(
mp.class_.__name__ for mp in self.with_polymorphic_mappers)
else:
with_poly = ""
return '<AliasedInsp at 0x%x; %s%s>' % (
id(self), self.class_.__name__, with_poly)
inspection._inspects(AliasedClass)(lambda target: target._aliased_insp)
inspection._inspects(AliasedInsp)(lambda target: target)
def aliased(element, alias=None, name=None, flat=False, adapt_on_names=False):
"""Produce an alias of the given element, usually an :class:`.AliasedClass`
instance.
E.g.::
my_alias = aliased(MyClass)
session.query(MyClass, my_alias).filter(MyClass.id > my_alias.id)
The :func:`.aliased` function is used to create an ad-hoc mapping
of a mapped class to a new selectable. By default, a selectable
is generated from the normally mapped selectable (typically a
:class:`.Table`) using the :meth:`.FromClause.alias` method.
However, :func:`.aliased` can also be used to link the class to
a new :func:`.select` statement. Also, the :func:`.with_polymorphic`
function is a variant of :func:`.aliased` that is intended to specify
a so-called "polymorphic selectable", that corresponds to the union
of several joined-inheritance subclasses at once.
For convenience, the :func:`.aliased` function also accepts plain
:class:`.FromClause` constructs, such as a :class:`.Table` or
:func:`.select` construct. In those cases, the :meth:`.FromClause.alias`
method is called on the object and the new :class:`.Alias` object
returned. The returned :class:`.Alias` is not ORM-mapped in this case.
:param element: element to be aliased. Is normally a mapped class,
but for convenience can also be a :class:`.FromClause` element.
:param alias: Optional selectable unit to map the element to. This should
normally be a :class:`.Alias` object corresponding to the :class:`.Table`
to which the class is mapped, or to a :func:`.select` construct that
is compatible with the mapping. By default, a simple anonymous
alias of the mapped table is generated.
:param name: optional string name to use for the alias, if not specified
by the ``alias`` parameter. The name, among other things, forms the
attribute name that will be accessible via tuples returned by a
:class:`.Query` object.
:param flat: Boolean, will be passed through to the
:meth:`.FromClause.alias` call so that aliases of :class:`.Join` objects
don't include an enclosing SELECT. This can lead to more efficient
queries in many circumstances. A JOIN against a nested JOIN will be
rewritten as a JOIN against an aliased SELECT subquery on backends that
don't support this syntax.
.. versionadded:: 0.9.0
.. seealso:: :meth:`.Join.alias`
:param adapt_on_names: if True, more liberal "matching" will be used when
mapping the mapped columns of the ORM entity to those of the
given selectable - a name-based match will be performed if the
given selectable doesn't otherwise have a column that corresponds
to one on the entity. The use case for this is when associating
an entity with some derived selectable such as one that uses
aggregate functions::
class UnitPrice(Base):
__tablename__ = 'unit_price'
...
unit_id = Column(Integer)
price = Column(Numeric)
aggregated_unit_price = Session.query(
func.sum(UnitPrice.price).label('price')
).group_by(UnitPrice.unit_id).subquery()
aggregated_unit_price = aliased(UnitPrice,
alias=aggregated_unit_price, adapt_on_names=True)
Above, functions on ``aggregated_unit_price`` which refer to
``.price`` will return the
``func.sum(UnitPrice.price).label('price')`` column, as it is
matched on the name "price". Ordinarily, the "price" function
wouldn't have any "column correspondence" to the actual
``UnitPrice.price`` column as it is not a proxy of the original.
.. versionadded:: 0.7.3
"""
if isinstance(element, expression.FromClause):
if adapt_on_names:
raise sa_exc.ArgumentError(
"adapt_on_names only applies to ORM elements"
)
return element.alias(name, flat=flat)
else:
return AliasedClass(element, alias=alias, flat=flat,
name=name, adapt_on_names=adapt_on_names)
def with_polymorphic(base, classes, selectable=False,
flat=False,
polymorphic_on=None, aliased=False,
innerjoin=False, _use_mapper_path=False,
_existing_alias=None):
"""Produce an :class:`.AliasedClass` construct which specifies
columns for descendant mappers of the given base.
.. versionadded:: 0.8
:func:`.orm.with_polymorphic` is in addition to the existing
:class:`.Query` method :meth:`.Query.with_polymorphic`,
which has the same purpose but is not as flexible in its usage.
Using this method will ensure that each descendant mapper's
tables are included in the FROM clause, and will allow filter()
criterion to be used against those tables. The resulting
instances will also have those columns already loaded so that
no "post fetch" of those columns will be required.
See the examples at :ref:`with_polymorphic`.
:param base: Base class to be aliased.
:param classes: a single class or mapper, or list of
class/mappers, which inherit from the base class.
Alternatively, it may also be the string ``'*'``, in which case
all descending mapped classes will be added to the FROM clause.
:param aliased: when True, the selectable will be wrapped in an
alias, that is ``(SELECT * FROM <fromclauses>) AS anon_1``.
This can be important when using the with_polymorphic()
to create the target of a JOIN on a backend that does not
support parenthesized joins, such as SQLite and older
versions of MySQL.
:param flat: Boolean, will be passed through to the
:meth:`.FromClause.alias` call so that aliases of :class:`.Join`
objects don't include an enclosing SELECT. This can lead to more
efficient queries in many circumstances. A JOIN against a nested JOIN
will be rewritten as a JOIN against an aliased SELECT subquery on
backends that don't support this syntax.
Setting ``flat`` to ``True`` implies the ``aliased`` flag is
also ``True``.
.. versionadded:: 0.9.0
.. seealso:: :meth:`.Join.alias`
:param selectable: a table or select() statement that will
be used in place of the generated FROM clause. This argument is
required if any of the desired classes use concrete table
inheritance, since SQLAlchemy currently cannot generate UNIONs
among tables automatically. If used, the ``selectable`` argument
must represent the full set of tables and columns mapped by every
mapped class. Otherwise, the unaccounted mapped columns will
result in their table being appended directly to the FROM clause
which will usually lead to incorrect results.
:param polymorphic_on: a column to be used as the "discriminator"
column for the given selectable. If not given, the polymorphic_on
attribute of the base classes' mapper will be used, if any. This
is useful for mappings that don't have polymorphic loading
behavior by default.
:param innerjoin: if True, an INNER JOIN will be used. This should
only be specified if querying for one specific subtype.
"""
primary_mapper = _class_to_mapper(base)
if _existing_alias:
assert _existing_alias.mapper is primary_mapper
classes = util.to_set(classes)
new_classes = set([
mp.class_ for mp in
_existing_alias.with_polymorphic_mappers])
if classes == new_classes:
return _existing_alias
else:
classes = classes.union(new_classes)
mappers, selectable = primary_mapper.\
_with_polymorphic_args(classes, selectable,
innerjoin=innerjoin)
if aliased or flat:
selectable = selectable.alias(flat=flat)
return AliasedClass(base,
selectable,
with_polymorphic_mappers=mappers,
with_polymorphic_discriminator=polymorphic_on,
use_mapper_path=_use_mapper_path)
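# Illustrative sketch (not part of the original module): assuming a
# hypothetical joined-inheritance hierarchy with ``Employee`` as the base
# and ``Manager`` / ``Engineer`` as subclasses, a typical call might be:
#
#   emp_poly = with_polymorphic(Employee, [Manager, Engineer])
#   session.query(emp_poly).filter(
#       or_(emp_poly.Manager.budget > 100000,
#           emp_poly.Engineer.primary_language == 'python'))
#
# The subclass entities are reached as attributes of the returned
# AliasedClass, and their tables are present in the FROM clause up front.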
def _orm_annotate(element, exclude=None):
"""Deep copy the given ClauseElement, annotating each element with the
"_orm_adapt" flag.
Elements within the exclude collection will be cloned but not annotated.
"""
return sql_util._deep_annotate(element, {'_orm_adapt': True}, exclude)
def _orm_deannotate(element):
"""Remove annotations that link a column to a particular mapping.
Note this doesn't affect "remote" and "foreign" annotations
passed by the :func:`.orm.foreign` and :func:`.orm.remote`
annotators.
"""
return sql_util._deep_deannotate(element,
values=("_orm_adapt", "parententity")
)
def _orm_full_deannotate(element):
return sql_util._deep_deannotate(element)
class _ORMJoin(expression.Join):
"""Extend Join to support ORM constructs as input."""
__visit_name__ = expression.Join.__visit_name__
def __init__(
self,
left, right, onclause=None, isouter=False,
full=False, _left_memo=None, _right_memo=None):
left_info = inspection.inspect(left)
left_orm_info = getattr(left, '_joined_from_info', left_info)
right_info = inspection.inspect(right)
adapt_to = right_info.selectable
self._joined_from_info = right_info
self._left_memo = _left_memo
self._right_memo = _right_memo
if isinstance(onclause, util.string_types):
onclause = getattr(left_orm_info.entity, onclause)
if isinstance(onclause, attributes.QueryableAttribute):
on_selectable = onclause.comparator._source_selectable()
prop = onclause.property
elif isinstance(onclause, MapperProperty):
prop = onclause
on_selectable = prop.parent.selectable
else:
prop = None
if prop:
if sql_util.clause_is_present(
on_selectable, left_info.selectable):
adapt_from = on_selectable
else:
adapt_from = left_info.selectable
pj, sj, source, dest, \
secondary, target_adapter = prop._create_joins(
source_selectable=adapt_from,
dest_selectable=adapt_to,
source_polymorphic=True,
dest_polymorphic=True,
of_type=right_info.mapper)
if sj is not None:
if isouter:
# note this is an inner join from secondary->right
right = sql.join(secondary, right, sj)
onclause = pj
else:
left = sql.join(left, secondary, pj, isouter)
onclause = sj
else:
onclause = pj
self._target_adapter = target_adapter
expression.Join.__init__(self, left, right, onclause, isouter, full)
if not prop and getattr(right_info, 'mapper', None) \
and right_info.mapper.single:
# if single inheritance target and we are using a manual
# or implicit ON clause, augment it the same way we'd augment the
# WHERE.
single_crit = right_info.mapper._single_table_criterion
if single_crit is not None:
if right_info.is_aliased_class:
single_crit = right_info._adapter.traverse(single_crit)
self.onclause = self.onclause & single_crit
def _splice_into_center(self, other):
"""Splice a join into the center.
Given join(a, b) and join(b, c), return join(a, b).join(c)
"""
leftmost = other
while isinstance(leftmost, sql.Join):
leftmost = leftmost.left
assert self.right is leftmost
left = _ORMJoin(
self.left, other.left,
self.onclause, isouter=self.isouter,
_left_memo=self._left_memo,
_right_memo=other._left_memo
)
return _ORMJoin(
left,
other.right,
other.onclause, isouter=other.isouter,
_right_memo=other._right_memo
)
def join(
self, right, onclause=None,
isouter=False, full=False, join_to_left=None):
return _ORMJoin(self, right, onclause, isouter=isouter, full=full)
def outerjoin(
self, right, onclause=None,
full=False, join_to_left=None):
return _ORMJoin(self, right, onclause, True, full=full)
def join(
left, right, onclause=None, isouter=False,
full=False, join_to_left=None):
"""Produce an inner join between left and right clauses.
:func:`.orm.join` is an extension to the core join interface
provided by :func:`.sql.expression.join()`, where the
left and right selectables may be not only core selectable
objects such as :class:`.Table`, but also mapped classes or
:class:`.AliasedClass` instances. The "on" clause can
be a SQL expression, or an attribute or string name
referencing a configured :func:`.relationship`.
:func:`.orm.join` is not commonly needed in modern usage,
as its functionality is encapsulated within that of the
:meth:`.Query.join` method, which features a
significant amount of automation beyond :func:`.orm.join`
by itself. Explicit usage of :func:`.orm.join`
with :class:`.Query` involves usage of the
:meth:`.Query.select_from` method, as in::
from sqlalchemy.orm import join
session.query(User).\\
select_from(join(User, Address, User.addresses)).\\
filter(Address.email_address=='[email protected]')
In modern SQLAlchemy the above join can be written more
succinctly as::
session.query(User).\\
join(User.addresses).\\
filter(Address.email_address=='[email protected]')
See :meth:`.Query.join` for information on modern usage
of ORM level joins.
.. versionchanged:: 0.8.1 - the ``join_to_left`` parameter
is no longer used, and is deprecated.
"""
return _ORMJoin(left, right, onclause, isouter, full)
def outerjoin(left, right, onclause=None, full=False, join_to_left=None):
"""Produce a left outer join between left and right clauses.
This is the "outer join" version of the :func:`.orm.join` function,
featuring the same behavior except that an OUTER JOIN is generated.
See that function's documentation for other usage details.
"""
return _ORMJoin(left, right, onclause, True, full)
def with_parent(instance, prop):
"""Create filtering criterion that relates this query's primary entity
to the given related instance, using established :func:`.relationship()`
configuration.
The SQL rendered is the same as that rendered when a lazy loader
would fire off from the given parent on that attribute, meaning
that the appropriate state is taken from the parent object in
Python without the need to render joins to the parent table
in the rendered statement.
.. versionchanged:: 0.6.4
This method accepts parent instances in all
persistence states, including transient, persistent, and detached.
Only the requisite primary key/foreign key attributes need to
be populated. Previous versions didn't work with transient
instances.
:param instance:
An instance which has some :func:`.relationship`.
:param property:
String property name, or class-bound attribute, which indicates
what relationship from the instance should be used to reconcile the
parent/child relationship.
"""
if isinstance(prop, util.string_types):
mapper = object_mapper(instance)
prop = getattr(mapper.class_, prop).property
elif isinstance(prop, attributes.QueryableAttribute):
prop = prop.property
return prop._with_parent(instance)
def has_identity(object):
"""Return True if the given object has a database
identity.
This typically corresponds to the object being
in either the persistent or detached state.
.. seealso::
:func:`.was_deleted`
"""
state = attributes.instance_state(object)
return state.has_identity
def was_deleted(object):
"""Return True if the given object was deleted
within a session flush.
This is regardless of whether or not the object is
persistent or detached.
.. versionadded:: 0.8.0
.. seealso::
:attr:`.InstanceState.was_deleted`
"""
state = attributes.instance_state(object)
return state.was_deleted
def randomize_unitofwork():
"""Use random-ordering sets within the unit of work in order
to detect unit of work sorting issues.
This is a utility function that can be used to help reproduce
inconsistent unit of work sorting issues. For example,
if two kinds of objects A and B are being inserted, and
B has a foreign key reference to A - the A must be inserted first.
However, if there is no relationship between A and B, the unit of work
won't know to perform this sorting, and an operation may or may not
fail, depending on how the ordering works out. Since Python sets
and dictionaries have non-deterministic ordering, such an issue may
occur on some runs and not on others, and in practice it tends to
have a great dependence on the state of the interpreter. This leads
to so-called "heisenbugs" where changing entirely irrelevant aspects
of the test program still cause the failure behavior to change.
By calling ``randomize_unitofwork()`` when a script first runs, the
ordering of a key series of sets within the unit of work implementation
are randomized, so that the script can be minimized down to the
fundamental mapping and operation that's failing, while still reproducing
the issue on at least some runs.
This utility is also available when running the test suite via the
``--reversetop`` flag.
.. versionadded:: 0.8.1 created a standalone version of the
``--reversetop`` feature.
"""
from sqlalchemy.orm import unitofwork, session, mapper, dependency
from sqlalchemy.util import topological
from sqlalchemy.testing.util import RandomSet
topological.set = unitofwork.set = session.set = mapper.set = \
dependency.set = RandomSet
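# Illustrative sketch (not part of the original module): a reproduction
# script for an ordering-dependent flush failure could call the helper
# once, before any mappings or sessions are exercised:
#
#   from sqlalchemy.orm.util import randomize_unitofwork
#   randomize_unitofwork()
#   run_failing_scenario()   # hypothetical function reproducing the bug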
| mit |
grimoirelab/perceval | perceval/backends/core/git.py | 1 | 46476 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2019 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Santiago Dueñas <[email protected]>
#
import collections
import io
import logging
import os
import re
import subprocess
import threading
import dulwich.client
import dulwich.repo
from grimoirelab_toolkit.datetime import datetime_to_utc, str_to_datetime
from ...backend import (Backend,
BackendCommand,
BackendCommandArgumentParser)
from ...errors import RepositoryError, ParseError
from ...utils import DEFAULT_DATETIME, DEFAULT_LAST_DATETIME
CATEGORY_COMMIT = 'commit'
logger = logging.getLogger(__name__)
class Git(Backend):
"""Git backend.
This class allows fetching the commits from a Git repository
(local or remote) or from a log file. To initialize this class,
you have to provide the repository URI and a value for `gitpath`.
This `uri` will be set as the origin of the data.
When `gitpath` is a directory or does not exist, it will be
considered as the place where the repository is/will be cloned;
when `gitpath` is a file it will be considered as a Git log file.
:param uri: URI of the Git repository
:param gitpath: path to the repository or to the log file
:param tag: label used to mark the data
:param archive: archive to store/retrieve items
:raises RepositoryError: raised when there was an error cloning or
updating the repository.
"""
version = '0.12.0'
CATEGORIES = [CATEGORY_COMMIT]
def __init__(self, uri, gitpath, tag=None, archive=None):
origin = uri
super().__init__(origin, tag=tag, archive=archive)
self.uri = uri
self.gitpath = gitpath
def fetch(self, category=CATEGORY_COMMIT, from_date=DEFAULT_DATETIME, to_date=DEFAULT_LAST_DATETIME,
branches=None, latest_items=False, no_update=False):
"""Fetch commits.
The method retrieves from a Git repository or a log file
a list of commits. Commits are returned in the same order
they were obtained.
When the `from_date` parameter is given, it returns items committed
since the given date.
The list of `branches` is a list of strings, with the names of
the branches to fetch. If the list of branches is empty, no
commit is fetched. If the list of branches is None, all commits
for all branches will be fetched.
The parameter `latest_items` returns only those commits which
are new since the last time this method was called.
The parameter `no_update` returns all commits without performing
an update of the repository before.
Take into account that `from_date` and `branches` are ignored
when the commits are fetched from a Git log file or when
`latest_items` flag is set.
The class raises a `RepositoryError` exception when an error
occurs accessing the repository.
:param category: the category of items to fetch
:param from_date: obtain commits newer than a specific date
(inclusive)
:param to_date: obtain commits older than a specific date
:param branches: names of branches to fetch from (default: None)
:param latest_items: sync with the repository to fetch only the
newest commits
:param no_update: if enabled, don't update the repo with the latest changes
:returns: a generator of commits
"""
if not from_date:
from_date = DEFAULT_DATETIME
if not to_date:
to_date = DEFAULT_LAST_DATETIME
kwargs = {
'from_date': from_date,
'to_date': to_date,
'branches': branches,
'latest_items': latest_items,
'no_update': no_update
}
items = super().fetch(category, **kwargs)
return items
def fetch_items(self, category, **kwargs):
"""Fetch the commits
:param category: the category of items to fetch
:param kwargs: backend arguments
:returns: a generator of items
"""
from_date = kwargs['from_date']
to_date = kwargs['to_date']
branches = kwargs['branches']
latest_items = kwargs['latest_items']
no_update = kwargs['no_update']
ncommits = 0
try:
if os.path.isfile(self.gitpath):
commits = self.__fetch_from_log()
else:
commits = self.__fetch_from_repo(from_date, to_date, branches,
latest_items, no_update)
for commit in commits:
yield commit
ncommits += 1
except EmptyRepositoryError:
pass
logger.info("Fetch process completed: %s commits fetched",
ncommits)
@classmethod
def has_archiving(cls):
"""Returns whether it supports archiving items on the fetch process.
:returns: this backend does not support items archive
"""
return False
@classmethod
def has_resuming(cls):
"""Returns whether it supports to resume the fetch process.
:returns: this backend supports items resuming
"""
return True
@staticmethod
def metadata_id(item):
"""Extracts the identifier from a Git item."""
return item['commit']
@staticmethod
def metadata_updated_on(item):
"""Extracts the update time from a Git item.
The timestamp used is extracted from 'CommitDate' field.
This date is converted to UNIX timestamp format taking into
account the timezone of the date.
:param item: item generated by the backend
:returns: a UNIX timestamp
"""
ts = item['CommitDate']
ts = str_to_datetime(ts)
return ts.timestamp()
@staticmethod
def metadata_category(item):
"""Extracts the category from a Git item.
This backend only generates one type of item which is
'commit'.
"""
return CATEGORY_COMMIT
@staticmethod
def parse_git_log_from_file(filepath):
"""Parse a Git log file.
The method parses the Git log file and returns an iterator of
dictionaries. Each one of these contains a commit.
:param filepath: path to the log file
:returns: a generator of parsed commits
:raises ParseError: raised when the format of the Git log file
is invalid
:raises OSError: raised when an error occurs reading the
given file
"""
with open(filepath, 'r', errors='surrogateescape',
newline=os.linesep) as f:
parser = GitParser(f)
for commit in parser.parse():
yield commit
@staticmethod
def parse_git_log_from_iter(iterator):
"""Parse a Git log obtained from an iterator.
The method parses the Git log fetched from an iterator, where
each item is a line of the log. It returns an iterator of
dictionaries. Each dictionary contains a commit.
:param iterator: iterator of Git log lines
:raises ParseError: raised when the format of the Git log
is invalid
"""
parser = GitParser(iterator)
for commit in parser.parse():
yield commit
def _init_client(self, from_archive=False):
pass
def __fetch_from_log(self):
logger.info("Fetching commits: '%s' git repository from log file %s",
self.uri, self.gitpath)
return self.parse_git_log_from_file(self.gitpath)
def __fetch_from_repo(self, from_date, to_date, branches, latest_items=False, no_update=False):
# When latest items are not requested or the repository has not
# been cloned, use the default mode
default_mode = not latest_items or not os.path.exists(self.gitpath)
repo = self.__create_git_repository()
if default_mode:
commits = self.__fetch_commits_from_repo(repo, from_date, to_date, branches, no_update)
else:
commits = self.__fetch_newest_commits_from_repo(repo)
return commits
def __fetch_commits_from_repo(self, repo, from_date, to_date, branches, no_update):
if branches is None:
branches_text = "all"
elif len(branches) == 0:
branches_text = "no"
else:
branches_text = ", ".join(branches)
logger.info("Fetching commits: '%s' git repository from %s to %s; %s branches",
self.uri, str(from_date), str(to_date), branches_text)
# Ignore default datetime to avoid problems with git
# or convert to UTC
if to_date == DEFAULT_LAST_DATETIME:
to_date = None
else:
to_date = datetime_to_utc(to_date)
if from_date == DEFAULT_DATETIME:
from_date = None
else:
from_date = datetime_to_utc(from_date)
if not no_update:
repo.update()
gitlog = repo.log(from_date, to_date, branches)
return self.parse_git_log_from_iter(gitlog)
def __fetch_newest_commits_from_repo(self, repo):
logger.info("Fetching latest commits: '%s' git repository",
self.uri)
hashes = repo.sync()
if not hashes:
return []
gitshow = repo.show(hashes)
return self.parse_git_log_from_iter(gitshow)
def __create_git_repository(self):
if not os.path.exists(self.gitpath):
repo = GitRepository.clone(self.uri, self.gitpath)
elif os.path.isdir(self.gitpath):
repo = GitRepository(self.uri, self.gitpath)
return repo
class GitCommand(BackendCommand):
"""Class to run Git backend from the command line."""
BACKEND = Git
def _pre_init(self):
"""Initialize repositories directory path"""
if self.parsed_args.git_log:
git_path = self.parsed_args.git_log
elif not self.parsed_args.git_path:
base_path = os.path.expanduser('~/.perceval/repositories/')
processed_uri = self.parsed_args.uri.lstrip('/')
git_path = os.path.join(base_path, processed_uri) + '-git'
else:
git_path = self.parsed_args.git_path
setattr(self.parsed_args, 'gitpath', git_path)
@classmethod
def setup_cmd_parser(cls):
"""Returns the Git argument parser."""
parser = BackendCommandArgumentParser(cls.BACKEND,
from_date=True,
to_date=True)
# Optional arguments
group = parser.parser.add_argument_group('Git arguments')
group.add_argument('--branches', dest='branches',
nargs='+', type=str, default=None,
help="Fetch commits only from these branches")
# Mutual exclusive parameters
exgroup = group.add_mutually_exclusive_group()
exgroup.add_argument('--git-path', dest='git_path',
help="Path where the Git repository will be cloned")
exgroup.add_argument('--git-log', dest='git_log',
help="Path to the Git log file")
exgroup_fetch = group.add_mutually_exclusive_group()
exgroup_fetch.add_argument('--latest-items', dest='latest_items',
action='store_true',
help="Fetch latest commits added to the repository")
exgroup_fetch.add_argument('--no-update', dest='no_update',
action='store_true',
help="Fetch all commits without updating the repository")
# Required arguments
parser.parser.add_argument('uri',
help="URI of the Git log repository")
return parser
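# Illustrative sketch (not part of the original module): this command class
# is normally driven through the ``perceval`` command-line wrapper; a run
# limited to two branches might look like this (URL and paths are examples):
#
#   perceval git https://github.com/owner/repo.git \
#       --git-path /tmp/owner-repo-git \
#       --branches master develop --from-date '2018-01-01'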
class GitParser:
"""Git log parser.
This class parses a plain Git log stream, converting plain commits
into dict items.
Not every Git log output is valid to be parsed. The Git log stream
must have a specific structure. It must contain raw commit data and
stats about modified files. The following excerpt shows an example of a
valid log:
commit aaa7a9209f096aaaadccaaa7089aaaa3f758a703
Author: John Smith <[email protected]>
AuthorDate: Tue Aug 14 14:30:13 2012 -0300
Commit: John Smith <[email protected]>
CommitDate: Tue Aug 14 14:30:13 2012 -0300
Commit for testing
:000000 100644 0000000... aaaaaaa... A aaa/otherthing
:000000 100644 0000000... aaaaaaa... A aaa/something
:000000 100644 0000000... aaaaaaa... A bbb/bthing
0 0 aaa/otherthing
0 0 aaa/something
0 0 bbb/bthing
Each commit starts with the 'commit' tag that is followed by the
SHA-1 of the commit, its parents (two or more parents in the case
of a merge) and a list of refs, if any.
commit 456a68ee1407a77f3e804a30dff245bb6c6b872f
ce8e0b86a1e9877f42fe9453ede418519115f367
51a3b654f252210572297f47597b31527c475fb8
(HEAD -> refs/heads/master)
The commit line is followed by one or more headers. Each header
has a key and a value:
Author: John Smith <[email protected]>
AuthorDate: Tue Aug 14 14:30:13 2012 -0300
Commit: John Smith <[email protected]>
CommitDate: Tue Aug 14 14:30:13 2012 -0300
Then, an empty line divides the headers from the commit message.
First line of the commit
Commit message split into one or several lines.
Each line of the message starts with 4 spaces.
Commit messages can contain a list of 'trailers'. These trailers
have the same format as headers but their meaning is project
dependent. This is an example of a commit message with trailers:
Commit message with trailers
This is the body of the message where trailers are included.
Trailers are part of the body so each line of the message
starts with 4 spaces.
Signed-off-by: John Doe <[email protected]>
Signed-off-by: Jane Rae <[email protected]>
After a new empty line, actions and stats over files can be found.
An action line starts with one or more ':' chars and contains data
about the old and new permissions of a file, its old and new indexes,
the action code and the filepath to the file. In the case of a copied,
renamed or moved file, the new filepath to that file is included.
:100644 100644 e69de29... e69de29... R100 aaa/otherthing aaa/otherthing.renamed
Stats lines include the number of lines added and removed, and the
name of the file. The new name is also included for moved or renamed
files.
10 0 aaa/{otherthing => otherthing.renamed}
The commit ends with an empty line.
Take into account that one empty line is valid at the beginning
of the log. This allows empty logs to be parsed without raising
exceptions.
This example was generated using the following command:
git log --raw --numstat --pretty=fuller --decorate=full \
--parents -M -C -c --remotes=origin --all
:param stream: a file object which stores the log
"""
COMMIT_PATTERN = r"""^commit[ \t](?P<commit>[a-f0-9]{40})
(?:[ \t](?P<parents>[a-f0-9][a-f0-9 \t]+))?
(?:[ \t]\((?P<refs>.+)\))?$
"""
HEADER_TRAILER_PATTERN = r"^(?P<name>[a-zA-Z0-9\-]+)\:[ \t]+(?P<value>.+)$"
MESSAGE_LINE_PATTERN = r"^[\s]{4}(?P<msg>.*)$"
ACTION_PATTERN = r"""^(?P<sc>\:+)
(?P<modes>(?:\d{6}[ \t])+)
(?P<indexes>(?:[a-f0-9]+\.{,3}[ \t])+)
(?P<action>[^\t]+)\t+
(?P<file>[^\t]+)
(?:\t+(?P<newfile>.+))?$"""
STATS_PATTERN = r"^(?P<added>\d+|-)[ \t]+(?P<removed>\d+|-)[ \t]+(?P<file>.+)$"
EMPTY_LINE_PATTERN = r"^$"
# Compiled patterns
GIT_COMMIT_REGEXP = re.compile(COMMIT_PATTERN, re.VERBOSE)
GIT_HEADER_TRAILER_REGEXP = re.compile(HEADER_TRAILER_PATTERN, re.VERBOSE)
GIT_MESSAGE_REGEXP = re.compile(MESSAGE_LINE_PATTERN, re.VERBOSE)
GIT_ACTION_REGEXP = re.compile(ACTION_PATTERN, re.VERBOSE)
GIT_STATS_REGEXP = re.compile(STATS_PATTERN, re.VERBOSE)
GIT_NEXT_STATE_REGEXP = re.compile(EMPTY_LINE_PATTERN, re.VERBOSE)
# Git parser status
(INIT,
COMMIT,
HEADER,
MESSAGE,
FILE) = range(5)
# Git trailers
TRAILERS = ['Signed-off-by']
def __init__(self, stream):
self.stream = stream
self.nline = 0
self.state = self.INIT
# Aux vars to store the commit that is being parsed
self.commit = None
self.commit_files = {}
self.handlers = {
self.INIT: self._handle_init,
self.COMMIT: self._handle_commit,
self.HEADER: self._handle_header,
self.MESSAGE: self._handle_message,
self.FILE: self._handle_file
}
def parse(self):
"""Parse the Git log stream."""
for line in self.stream:
line = line.rstrip('\n')
parsed = False
self.nline += 1
while not parsed:
parsed = self.handlers[self.state](line)
if self.state == self.COMMIT and self.commit:
commit = self._build_commit()
logger.debug("Commit %s parsed", commit['commit'])
yield commit
# Return the last commit, if any
if self.commit:
commit = self._build_commit()
logger.debug("Commit %s parsed", commit['commit'])
yield commit
def _build_commit(self):
def remove_none_values(d):
return {k: v for k, v in d.items() if v is not None}
commit = self.commit
commit = remove_none_values(commit)
commit['files'] = [remove_none_values(item)
for _, item in sorted(self.commit_files.items())]
self.commit = None
self.commit_files = {}
return commit
def _handle_init(self, line):
m = self.GIT_NEXT_STATE_REGEXP.match(line)
# In both cases, the parser advances to the next state.
# It only has to check whether the line has to be parsed
# again or not
self.state = self.COMMIT
parsed = m is not None
return parsed
def _handle_commit(self, line):
m = self.GIT_COMMIT_REGEXP.match(line)
if not m:
msg = "commit expected on line %s" % (str(self.nline))
raise ParseError(cause=msg)
parents = self.__parse_data_list(m.group('parents'))
refs = self.__parse_data_list(m.group('refs'), sep=',')
# Initialize a new commit
self.commit = {}
self.commit['commit'] = m.group('commit')
self.commit['parents'] = parents
self.commit['refs'] = refs
self.state = self.HEADER
return True
def _handle_header(self, line):
m = self.GIT_NEXT_STATE_REGEXP.match(line)
if m:
self.state = self.MESSAGE
return True
m = self.GIT_HEADER_TRAILER_REGEXP.match(line)
if not m:
msg = "invalid header format on line %s" % (str(self.nline))
raise ParseError(cause=msg)
header = m.group('name')
value = m.group('value')
self.commit[header] = value
return True
def _handle_message(self, line):
m = self.GIT_NEXT_STATE_REGEXP.match(line)
if m:
self.state = self.FILE
return True
m = self.GIT_MESSAGE_REGEXP.match(line)
if not m:
logger.debug("Invalid message format on line %s. Skipping.",
str(self.nline))
self.state = self.FILE
return False
msg_line = m.group('msg')
# Concatenate message lines
if 'message' not in self.commit:
self.commit['message'] = ''
else:
self.commit['message'] += '\n'
self.commit['message'] += msg_line
# Check trailers
self._handle_trailer(msg_line)
return True
def _handle_file(self, line):
m = self.GIT_NEXT_STATE_REGEXP.match(line)
if m:
self.state = self.COMMIT
return True
m = self.GIT_ACTION_REGEXP.match(line)
if m:
data = m.groupdict()
self._handle_action_data(data)
return True
m = self.GIT_STATS_REGEXP.match(line)
if m:
data = m.groupdict()
self._handle_stats_data(data)
return True
# No match case
logger.debug("Invalid action format on line %s. Skipping.",
str(self.nline))
self.state = self.COMMIT
return False
def _handle_trailer(self, line):
m = self.GIT_HEADER_TRAILER_REGEXP.match(line)
if not m:
return
trailer = m.group('name')
value = m.group('value')
if trailer not in self.TRAILERS:
logger.debug("Trailer %s found on line %s but is not a core trailer. Skipping.",
trailer, str(self.nline))
return
self.commit.setdefault(trailer, []).append(value)
def _handle_action_data(self, data):
modes = self.__parse_data_list(data['modes'])
indexes = self.__parse_data_list(data['indexes'])
filename = data['file']
if filename not in self.commit_files:
self.commit_files[filename] = {}
self.commit_files[filename]['modes'] = modes
self.commit_files[filename]['indexes'] = indexes
self.commit_files[filename]['action'] = data['action']
self.commit_files[filename]['file'] = filename
self.commit_files[filename]['newfile'] = data['newfile']
def _handle_stats_data(self, data):
filename = self.__get_old_filepath(data['file'])
if filename not in self.commit_files:
self.commit_files[filename] = {'file': filename}
self.commit_files[filename]['added'] = data['added']
self.commit_files[filename]['removed'] = data['removed']
def __parse_data_list(self, data, sep=' '):
if data:
lst = data.strip().split(sep)
return [e.strip() for e in lst]
else:
return []
def __get_old_filepath(self, f):
"""Get the old filepath of a moved/renamed file.
Moved or renamed files can be found in the log with any of the
next patterns:
'old_name => new_name'
'{old_prefix => new_prefix}/name'
'name/{old_suffix => new_suffix}'
This method returns the filepath before the file was moved or
renamed.
"""
i = f.find('{')
j = f.find('}')
if i > -1 and j > -1:
prefix = f[0:i]
inner = f[i + 1:f.find(' => ', i)]
suffix = f[j + 1:]
return prefix + inner + suffix
elif ' => ' in f:
return f.split(' => ')[0]
else:
return f
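# Illustrative sketch (not part of the original module): the three rename
# patterns handled by __get_old_filepath map to old paths as follows:
#
#   'old_name => new_name'                -> 'old_name'
#   '{aaa => bbb}/otherthing'             -> 'aaa/otherthing'
#   'docs/{old_dir => new_dir}/index.rst' -> 'docs/old_dir/index.rst'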
class EmptyRepositoryError(RepositoryError):
"""Exception raised when a repository is empty"""
message = "%(repository)s is empty"
GitRef = collections.namedtuple('GitRef', ['hash', 'refname'])
class _GraphWalker:
"""Commit walker needed by fetch_pack"""
def __init__(self, local_refs):
self.heads = [
ref.hash.encode('utf-8') for ref in local_refs
if ref.refname.startswith('refs/heads/')
]
def ack(self, sha):
pass
def next(self):
if self.heads:
ret = self.heads.pop()
return ret
return None
__next__ = next
class GitRepository:
"""Manage a Git repository.
This class provides access to a Git repository running some
common commands such as `clone`, `pull` or `log`.
To create an instance from a remote repository, use `clone()`
class method.
:param uri: URI of the repository
:param dirpath: local directory where the repository is stored
"""
GIT_PRETTY_OUTPUT_OPTS = [
'--raw', # show data in raw format
'--numstat', # show added/deleted lines per file
'--pretty=fuller', # pretty output
'--decorate=full', # show full refs
'--parents', # show parents information
'-M', # detect and report renames
'-C', # detect and report copies
'-c', # show merge info
]
def __init__(self, uri, dirpath):
gitdir = os.path.join(dirpath, 'HEAD')
if not os.path.exists(dirpath):
cause = "directory '%s' for Git repository '%s' does not exist" % (dirpath, uri)
raise RepositoryError(cause=cause)
elif not os.path.exists(gitdir):
warning = "Working directories for Git repositories no longer supported." \
"Please remove it or clone it using --mirror option."
logger.warning(warning)
cause = "directory '%s' is not a Git mirror of repository '%s'" % (dirpath, uri)
raise RepositoryError(cause=cause)
self.uri = uri
self.dirpath = dirpath
self.gitenv = {
'LANG': 'C',
'PAGER': '',
'HTTP_PROXY': os.getenv('HTTP_PROXY', ''),
'HTTPS_PROXY': os.getenv('HTTPS_PROXY', ''),
'NO_PROXY': os.getenv('NO_PROXY', ''),
'HOME': os.getenv('HOME', '')
}
@classmethod
def clone(cls, uri, dirpath):
"""Clone a Git repository.
Make a bare copy of the repository stored in `uri` into `dirpath`.
The repository would be either local or remote.
:param uri: URI of the repository
:param dirpath: directory where the repository will be cloned
:returns: a `GitRepository` class having cloned the repository
:raises RepositoryError: when an error occurs cloning the given
repository
"""
cmd = ['git', 'clone', '--bare', uri, dirpath]
env = {
'LANG': 'C',
'HOME': os.getenv('HOME', '')
}
cls._exec(cmd, env=env)
logger.debug("Git %s repository cloned into %s",
uri, dirpath)
return cls(uri, dirpath)
def count_objects(self):
"""Count the objects of a repository.
The method returns the total number of objects (packed and unpacked)
available on the repository.
:raises RepositoryError: when an error occurs counting the objects
of a repository
"""
cmd_count = ['git', 'count-objects', '-v']
outs = self._exec(cmd_count, cwd=self.dirpath, env=self.gitenv)
outs = outs.decode('utf-8', errors='surrogateescape').rstrip()
try:
cobjs = {k: v for k, v in (x.split(': ') for x in outs.split('\n'))}
nobjs = int(cobjs['count']) + int(cobjs['in-pack'])
except KeyError as e:
error = "unable to parse 'count-objects' output; reason: '%s' entry not found" \
% e.args[0]
raise RepositoryError(cause=error)
except ValueError as e:
error = "unable to parse 'count-objects' output; reason: %s" % str(e)
raise RepositoryError(cause=error)
logger.debug("Git %s repository has %s objects",
self.uri, str(nobjs))
return nobjs
def is_detached(self):
"""Check if the repo is in a detached state.
The repository is in a detached state when HEAD is not a symbolic
reference.
:returns: whether the repository is detached or not
:raises RepositoryError: when an error occurs checking the state
of the repository
"""
cmd_sym = ['git', 'symbolic-ref', 'HEAD']
try:
self._exec(cmd_sym, cwd=self.dirpath, env=self.gitenv)
except RepositoryError as e:
if e.msg.find("ref HEAD is not a symbolic ref") == -1:
raise e
return True
else:
return False
def is_empty(self):
"""Determines whether the repository is empty or not.
Returns `True` when the repository is empty. Under the hood,
it checks the number of objects in the repository. When
this number is 0, the repository is empty.
:raises RepositoryError: when an error occurs accessing the
repository
"""
return self.count_objects() == 0
def update(self):
"""Update repository from its remote.
When this method is called, the repository will be synchronized
with the remote repository using the 'fetch' command for 'heads' refs.
Any commit stored in the local copy will be removed; refs
will be overwritten.
:raises RepositoryError: when an error occurs updating the
repository
"""
cmd_update = ['git', 'fetch', 'origin', '+refs/heads/*:refs/heads/*', '--prune']
self._exec(cmd_update, cwd=self.dirpath, env=self.gitenv)
logger.debug("Git %s repository updated into %s",
self.uri, self.dirpath)
def sync(self):
"""Keep the repository in sync.
This method will synchronize the repository with its 'origin',
fetching the newest objects and updating references. It uses
low-level commands which make it possible to keep track of what
has changed in the repository.
The method also returns a list of hashes related to the new
commits fetched during the process.
:returns: list of new commits
:raises RepositoryError: when an error occurs synchronizing
the repository
"""
pack_name, refs = self._fetch_pack()
if pack_name:
commits = self._read_commits_from_pack(pack_name)
else:
commits = []
logger.debug("Git repository %s (%s) does not have any new object",
self.uri, self.dirpath)
self._update_references(refs)
logger.debug("Git repository %s (%s) is synced",
self.uri, self.dirpath)
return commits
def rev_list(self, branches=None):
"""Read the list commits from the repository
The list of branches is a list of strings, with the names of the
branches to fetch. If the list of branches is empty, no commit
is fetched. If the list of branches is None, all commits
for all branches will be fetched.
The method returns the Git rev-list of the repository using the
following options:
git rev-list --topo-order
:param branches: names of branches to fetch from (default: None)
:raises EmptyRepositoryError: when the repository is empty and
the action cannot be performed
:raises RepositoryError: when an error occurs executing the command
"""
if self.is_empty():
logger.warning("Git %s repository is empty; unable to get the rev-list",
self.uri)
raise EmptyRepositoryError(repository=self.uri)
cmd_rev_list = ['git', 'rev-list', '--topo-order']
if branches is None:
cmd_rev_list.extend(['--branches', '--tags', '--remotes=origin'])
elif len(branches) == 0:
cmd_rev_list.extend(['--branches', '--tags', '--max-count=0'])
else:
branches = ['refs/heads/' + branch for branch in branches]
cmd_rev_list.extend(branches)
for line in self._exec_nb(cmd_rev_list, cwd=self.dirpath, env=self.gitenv):
yield line.rstrip('\n')
logger.debug("Git rev-list fetched from %s repository (%s)",
self.uri, self.dirpath)
def log(self, from_date=None, to_date=None, branches=None, encoding='utf-8'):
"""Read the commit log from the repository.
The method returns the Git log of the repository using the
following options:
git log --raw --numstat --pretty=fuller --decorate=full
--all --reverse --topo-order --parents -M -C -c
--remotes=origin
When `from_date` is given, it gets the commits equal to or newer
than that date. This date is given as a datetime object.
The list of branches is a list of strings, with the names of the
branches to fetch. If the list of branches is empty, no commit
is fetched. If the list of branches is None, all commits
for all branches will be fetched.
:param from_date: fetch commits newer than a specific
date (inclusive)
:param branches: names of branches to fetch from (default: None)
:param encoding: encode the log using this format
:returns: a generator where each item is a line from the log
:raises EmptyRepositoryError: when the repository is empty and
the action cannot be performed
:raises RepositoryError: when an error occurs fetching the log
"""
if self.is_empty():
logger.warning("Git %s repository is empty; unable to get the log",
self.uri)
raise EmptyRepositoryError(repository=self.uri)
cmd_log = ['git', 'log', '--reverse', '--topo-order']
cmd_log.extend(self.GIT_PRETTY_OUTPUT_OPTS)
if from_date:
dt = from_date.strftime("%Y-%m-%d %H:%M:%S %z")
cmd_log.append('--since=' + dt)
if to_date:
dt = to_date.strftime("%Y-%m-%d %H:%M:%S %z")
cmd_log.append('--until=' + dt)
if branches is None:
cmd_log.extend(['--branches', '--tags', '--remotes=origin'])
elif len(branches) == 0:
cmd_log.append('--max-count=0')
else:
branches = ['refs/heads/' + branch for branch in branches]
cmd_log.extend(branches)
for line in self._exec_nb(cmd_log, cwd=self.dirpath, env=self.gitenv):
yield line
logger.debug("Git log fetched from %s repository (%s)",
self.uri, self.dirpath)
def show(self, commits=None, encoding='utf-8'):
"""Show the data of a set of commits.
The method returns the output of Git show command for a
set of commits using the following options:
git show --raw --numstat --pretty=fuller --decorate=full
--parents -M -C -c [<commit>...<commit>]
When the list of commits is empty, the command will return
data about the last commit, like the default behaviour of
`git show`.
:param commits: list of commits to show data
:param encoding: encode the output using this format
:returns: a generator where each item is a line from the show output
:raises EmptyRepositoryError: when the repository is empty and
the action cannot be performed
:raises RepositoryError: when an error occurs fetching the show output
"""
if self.is_empty():
logger.warning("Git %s repository is empty; unable to run show",
self.uri)
raise EmptyRepositoryError(repository=self.uri)
if commits is None:
commits = []
cmd_show = ['git', 'show']
cmd_show.extend(self.GIT_PRETTY_OUTPUT_OPTS)
cmd_show.extend(commits)
for line in self._exec_nb(cmd_show, cwd=self.dirpath, env=self.gitenv):
yield line
logger.debug("Git show fetched from %s repository (%s)",
self.uri, self.dirpath)
def _fetch_pack(self):
"""Fetch changes and store them in a pack."""
def prepare_refs(refs):
return [ref.hash.encode('utf-8') for ref in refs
if not ref.refname.endswith('^{}')]
def determine_wants(refs):
remote_refs = prepare_refs(self._discover_refs(remote=True))
local_refs = prepare_refs(self._discover_refs())
wants = [ref for ref in remote_refs if ref not in local_refs]
return wants
client, repo_path = dulwich.client.get_transport_and_path(self.uri)
repo = dulwich.repo.Repo(self.dirpath)
fd = io.BytesIO()
local_refs = self._discover_refs()
graph_walker = _GraphWalker(local_refs)
result = client.fetch_pack(repo_path,
determine_wants,
graph_walker,
fd.write)
refs = [GitRef(ref_hash.decode('utf-8'), ref_name.decode('utf-8'))
for ref_name, ref_hash in result.refs.items()]
if len(fd.getvalue()) > 0:
fd.seek(0)
pack = repo.object_store.add_thin_pack(fd.read, None)
pack_name = pack.name().decode('utf-8')
else:
pack_name = None
return (pack_name, refs)
def _read_commits_from_pack(self, packet_name):
"""Read the commits of a pack."""
filepath = 'objects/pack/pack-' + packet_name
cmd_verify_pack = ['git', 'verify-pack', '-v', filepath]
outs = self._exec(cmd_verify_pack, cwd=self.dirpath, env=self.gitenv)
outs = outs.decode('utf-8', errors='surrogateescape').rstrip()
lines = [line.split(' ') for line in outs.split('\n')]
# Commits usually come in the pack ordered from newest to oldest
commits = [parts[0] for parts in lines if parts[1] == 'commit']
commits.reverse()
return commits
def _update_references(self, refs):
"""Update references removing old ones."""
new_refs = [ref.refname for ref in refs]
# Delete old references
for old_ref in self._discover_refs():
if not old_ref.refname.startswith('refs/heads/'):
continue
if old_ref.refname in new_refs:
continue
self._update_ref(old_ref, delete=True)
# Update new references
for new_ref in refs:
refname = new_ref.refname
if refname.endswith('^{}'):
logger.debug("Annotated tag %s ignored for updating in sync process",
refname)
continue
elif not refname.startswith('refs/heads/') and not refname.startswith('refs/tags/'):
logger.debug("Reference %s not needed; ignored for updating in sync process",
refname)
continue
else:
self._update_ref(new_ref)
# Prune repository to remove old branches
cmd = ['git', 'remote', 'prune', 'origin']
self._exec(cmd, cwd=self.dirpath, env=self.gitenv)
def _discover_refs(self, remote=False):
"""Get the current list of local or remote refs."""
if remote:
cmd_refs = ['git', 'ls-remote', '-h', '-t', '--exit-code', 'origin']
sep = '\t'
ignored_error_codes = [2]
else:
# Check first whether the local repo is empty;
# Running 'show-ref' in empty repos gives an error
if self.is_empty():
raise EmptyRepositoryError(repository=self.uri)
cmd_refs = ['git', 'show-ref', '--heads', '--tags']
sep = ' '
ignored_error_codes = [1]
# Error codes returned when no matching refs (i.e, no heads
# or tags) are found in a repository will be ignored. Otherwise,
# the full process would fail for those situations.
outs = self._exec(cmd_refs, cwd=self.dirpath,
env=self.gitenv,
ignored_error_codes=ignored_error_codes)
outs = outs.decode('utf-8', errors='surrogateescape').rstrip()
outs = outs.split('\n') if outs else []
refs = []
for line in outs:
data = line.split(sep)
ref = GitRef(data[0], data[1])
refs.append(ref)
return refs
def _update_ref(self, ref, delete=False):
"""Update a reference."""
cmd = ['git', 'update-ref']
if delete:
cmd.extend(['-d', ref.refname])
action = 'deleted'
else:
cmd.extend([ref.refname, ref.hash])
action = 'updated to %s' % ref.hash
try:
self._exec(cmd, cwd=self.dirpath, env=self.gitenv)
except RepositoryError as e:
logger.warning("Git %s ref could not be %s during sync process in %s (%s); skipped",
ref.refname, action, self.uri, self.dirpath)
else:
logger.debug("Git %s ref %s in %s (%s)",
ref.refname, action, self.uri, self.dirpath)
def _exec_nb(self, cmd, cwd=None, env=None, encoding='utf-8'):
"""Run a command with a non blocking call.
Execute `cmd` command with a non blocking call. The command will
be run in the directory set by `cwd`. Environment variables can be
set using the `env` dictionary. The output data is returned
as encoded bytes in an iterator. Each item will be a line of the
output.
:returns: an iterator with the output of the command as encoded bytes
:raises RepositoryError: when an error occurs running the command
"""
self.failed_message = None
logger.debug("Running command %s (cwd: %s, env: %s)",
' '.join(cmd), cwd, str(env))
try:
self.proc = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=cwd,
env=env)
err_thread = threading.Thread(target=self._read_stderr,
kwargs={'encoding': encoding},
daemon=True)
err_thread.start()
for line in self.proc.stdout:
yield line.decode(encoding, errors='surrogateescape')
err_thread.join()
self.proc.communicate()
self.proc.stdout.close()
self.proc.stderr.close()
except OSError as e:
err_thread.join()
raise RepositoryError(cause=str(e))
if self.proc.returncode != 0:
cause = "git command - %s (return code: %d)" % \
(self.failed_message, self.proc.returncode)
raise RepositoryError(cause=cause)
def _read_stderr(self, encoding='utf-8'):
"""Reads self.proc.stderr.
Usually, this should be run in a thread, to prevent the read
from stdout from blocking: if the stderr buffer fills up and this
function is not being called because the program is busy in the
stdout reading loop, the subprocess would stall.
Reads self.proc.stderr (self.proc is the subprocess running
the git command), and reads / writes self.failed_message
(the message sent to stderr when git fails, usually one line).
"""
for line in self.proc.stderr:
err_line = line.decode(encoding, errors='surrogateescape')
if self.proc.returncode != 0:
# If the subprocess didn't finish successfully, we expect
# the last line in stderr to provide the cause
if self.failed_message is not None:
# We had a message, there is a newer line, print it
logger.debug("Git log stderr: " + self.failed_message)
self.failed_message = err_line
else:
# The subprocess is successfully up to now, print the line
logger.debug("Git log stderr: " + err_line)
@staticmethod
def _exec(cmd, cwd=None, env=None, ignored_error_codes=None,
encoding='utf-8'):
"""Run a command.
Execute `cmd` command in the directory set by `cwd`. Environment
variables can be set using the `env` dictionary. The output
data is returned as encoded bytes.
Commands whose return status codes are non-zero will
be treated as failed. Error codes considered valid can be
ignored by passing them in the `ignored_error_codes` list.
:returns: the output of the command as encoded bytes
:raises RepositoryError: when an error occurs running the command
"""
if ignored_error_codes is None:
ignored_error_codes = []
logger.debug("Running command %s (cwd: %s, env: %s)",
' '.join(cmd), cwd, str(env))
try:
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=cwd, env=env)
(outs, errs) = proc.communicate()
except OSError as e:
raise RepositoryError(cause=str(e))
if proc.returncode != 0 and proc.returncode not in ignored_error_codes:
err = errs.decode(encoding, errors='surrogateescape')
cause = "git command - %s" % err
raise RepositoryError(cause=cause)
else:
logger.debug(errs.decode(encoding, errors='surrogateescape'))
return outs
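# Illustrative sketch (not part of the original module): a minimal
# programmatic use of the backend defined above (URL and path are examples):
#
#   backend = Git(uri='https://github.com/owner/repo.git',
#                 gitpath='/tmp/owner-repo-git')
#   for item in backend.fetch():
#       print(item['data']['commit'], item['data']['Author'])
#
# Each item yielded by ``fetch()`` carries Perceval metadata, with the
# parsed commit produced by GitParser stored under its 'data' key.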
| gpl-3.0 |
uniteddiversity/mediadrop | mediadrop/model/tests/media_test.py | 7 | 2851 | # This file is a part of MediaDrop (http://www.mediadrop.net),
# Copyright 2009-2015 MediaDrop contributors
# For the exact contribution history, see the git revision log.
# The source code contained in this file is licensed under the GPLv3 or
# (at your option) any later version.
# See LICENSE.txt in the main project directory, for more information.
from mediadrop.model import DBSession, Media
from mediadrop.lib.filetypes import VIDEO
from mediadrop.lib.i18n import setup_global_translator
from mediadrop.lib.players import AbstractFlashPlayer, FlowPlayer
from mediadrop.lib.storage.api import add_new_media_file
from mediadrop.lib.test.db_testcase import DBTestCase
from mediadrop.lib.test.pythonic_testcase import *
from mediadrop.plugin import events
from mediadrop.plugin.events import observes
class MediaTest(DBTestCase):
def setUp(self):
super(MediaTest, self).setUp()
setup_global_translator(registry=self.paste_registry)
self.init_flowplayer()
self.media = Media.example()
self.encoding_event = self.create_spy_on_event(events.Media.encoding_done)
def init_flowplayer(self):
AbstractFlashPlayer.register(FlowPlayer)
FlowPlayer.inject_in_db(enable_player=True)
def create_spy_on_event(self, event):
encoding_event = create_spy()
observes(event)(encoding_event)
return encoding_event
def add_external_file(self, media, url=u'http://site.example/videos.mp4'):
previous_files = len(media.files)
media_file = add_new_media_file(media, url=url)
# add_new_media_file will set media_file.media AND media.files.append
# so we have two files for the media until the session is refreshed.
DBSession.refresh(media)
assert_length(previous_files+1, media.files)
return media_file
def test_can_update_status(self):
assert_false(self.media.encoded)
self.media.update_status()
assert_false(self.media.encoded)
self.encoding_event.assert_was_not_called()
def test_triggers_event_when_media_was_encoded(self):
self.add_external_file(self.media)
assert_false(self.media.encoded)
self.media.update_status()
assert_equals(VIDEO, self.media.type)
assert_true(self.media.encoded)
self.encoding_event.assert_was_called_with(self.media)
# only send event when the encoding status changes!
second_encoding_event = self.create_spy_on_event(events.Media.encoding_done)
self.media.update_status()
second_encoding_event.assert_was_not_called()
import unittest
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(MediaTest))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| gpl-3.0 |
atcemgil/notes | HiddenMarkovModel.py | 1 | 8122 | import numpy as np
from notes_utilities import randgen, log_sum_exp, normalize_exp, normalize
class HMM(object):
def __init__(self, pi, A, B):
# p(x_0)
self.pi = pi
# p(x_k|x_{k-1})
self.A = A
# p(y_k|x_{k})
self.B = B
# Number of possible latent states at each time
self.S = pi.shape[0]
# Number of possible observations at each time
self.R = B.shape[0]
self.logB = np.log(self.B)
self.logA = np.log(self.A)
self.logpi = np.log(self.pi)
def set_param(self, pi=None, A=None, B=None):
if pi is not None:
self.pi = pi
self.logpi = np.log(self.pi)
if A is not None:
self.A = A
self.logA = np.log(self.A)
if B is not None:
self.B = B
self.logB = np.log(self.B)
@classmethod
def from_random_parameters(cls, S=3, R=5):
A = np.random.dirichlet(0.7*np.ones(S),S).T
B = np.random.dirichlet(0.7*np.ones(R),S).T
pi = np.random.dirichlet(0.7*np.ones(S)).T
return cls(pi, A, B)
def __str__(self):
s = "Prior:\n" + str(self.pi) + "\nA:\n" + str(self.A) + "\nB:\n" + str(self.B)
return s
def __repr__(self):
s = self.__str__()
return s
def predict(self, lp):
lstar = np.max(lp)
return lstar + np.log(np.dot(self.A,np.exp(lp-lstar)))
def postdict(self, lp):
lstar = np.max(lp)
return lstar + np.log(np.dot(np.exp(lp-lstar), self.A))
def predict_maxm(self, lp):
return np.max(self.logA + lp, axis=1)
def postdict_maxm(self, lp):
return np.max(self.logA.T + lp, axis=1)
def update(self, y, lp):
return self.logB[y,:] + lp if not np.isnan(y) else lp
def generate_sequence(self, T=10):
# T: Number of steps
x = np.zeros(T, int)
y = np.zeros(T, int)
for t in range(T):
if t==0:
x[t] = randgen(self.pi)
else:
x[t] = randgen(self.A[:,x[t-1]])
y[t] = randgen(self.B[:,x[t]])
return y, x
def forward(self, y, maxm=False):
T = len(y)
# Forward Pass
# Python indices start from zero so
# log \alpha_{k|k} will be in log_alpha[:,k-1]
# log \alpha_{k|k-1} will be in log_alpha_pred[:,k-1]
log_alpha = np.zeros((self.S, T))
log_alpha_pred = np.zeros((self.S, T))
for k in range(T):
if k==0:
log_alpha_pred[:,0] = self.logpi
else:
if maxm:
log_alpha_pred[:,k] = self.predict_maxm(log_alpha[:,k-1])
else:
log_alpha_pred[:,k] = self.predict(log_alpha[:,k-1])
log_alpha[:,k] = self.update(y[k], log_alpha_pred[:,k])
return log_alpha, log_alpha_pred
def backward(self, y, maxm=False):
# Backward Pass
T = len(y)
log_beta = np.zeros((self.S, T))
log_beta_post = np.zeros((self.S, T))
for k in range(T-1,-1,-1):
if k==T-1:
log_beta_post[:,k] = np.zeros(self.S)
else:
if maxm:
log_beta_post[:,k] = self.postdict_maxm(log_beta[:,k+1])
else:
log_beta_post[:,k] = self.postdict(log_beta[:,k+1])
log_beta[:,k] = self.update(y[k], log_beta_post[:,k])
return log_beta, log_beta_post
def forward_backward_smoother(self, y):
log_alpha, log_alpha_pred = self.forward(y)
log_beta, log_beta_post = self.backward(y)
log_gamma = log_alpha + log_beta_post
return log_gamma
def viterbi(self, y):
T = len(y)
# Forward Pass
log_alpha = np.zeros((self.S, T))
for k in range(T):
if k==0:
log_alpha_pred = self.logpi
else:
log_alpha_pred = self.predict(log_alpha[:,k-1])
log_alpha[:,k] = self.update(y[k], log_alpha_pred)
xs = list()
w = np.argmax(log_alpha[:,-1])
xs.insert(0, w)
for k in range(T-2,-1,-1):
w = np.argmax(log_alpha[:,k] + self.logA[w,:])
xs.insert(0, w)
return xs
def viterbi_maxsum(self, y):
'''Vanilla implementation of Viterbi decoding via max-sum.
This algorithm may fail to find the MAP trajectory as it breaks ties arbitrarily.'''
log_alpha, log_alpha_pred = self.forward(y, maxm=True)
log_beta, log_beta_post = self.backward(y, maxm=True)
log_delta = log_alpha + log_beta_post
return np.argmax(log_delta, axis=0)
def correction_smoother(self, y):
# Correction Smoother
log_alpha, log_alpha_pred = self.forward(y)
T = len(y)
# For numerical stability, we calculate everything in the log domain
log_gamma_corr = np.zeros_like(log_alpha)
log_gamma_corr[:,T-1] = log_alpha[:,T-1]
C2 = np.zeros((self.S, self.S))
C3 = np.zeros((self.R, self.S))
C3[y[-1],:] = normalize_exp(log_alpha[:,T-1])
for k in range(T-2,-1,-1):
log_old_pairwise_marginal = log_alpha[:,k].reshape(1,self.S) + self.logA
log_old_marginal = self.predict(log_alpha[:,k])
log_new_pairwise_marginal = log_old_pairwise_marginal + log_gamma_corr[:,k+1].reshape(self.S,1) - log_old_marginal.reshape(self.S,1)
log_gamma_corr[:,k] = log_sum_exp(log_new_pairwise_marginal, axis=0).reshape(self.S)
C2 += normalize_exp(log_new_pairwise_marginal)
C3[y[k],:] += normalize_exp(log_gamma_corr[:,k])
C1 = normalize_exp(log_gamma_corr[:,0])
return log_gamma_corr, C1, C2, C3
def forward_only_SS(self, y, V=None):
# Forward only estimation of expected sufficient statistics
T = len(y)
if V is None:
V1 = np.eye((self.S))
V2 = np.zeros((self.S,self.S,self.S))
V3 = np.zeros((self.R,self.S,self.S))
else:
V1, V2, V3 = V
I_S1S = np.eye(self.S).reshape((self.S,1,self.S))
I_RR = np.eye(self.R)
for k in range(T):
if k==0:
log_alpha_pred = self.logpi
else:
log_alpha_pred = self.predict(log_alpha)
if k>0:
#print(self.S, self.R)
#print(log_alpha)
# Calculate p(x_{k-1}|y_{1:k-1}, x_k)
lp = np.log(normalize_exp(log_alpha)).reshape(self.S,1) + self.logA.T
P = normalize_exp(lp, axis=0)
# Update
V1 = np.dot(V1, P)
V2 = np.dot(V2, P) + I_S1S*P.reshape((1,self.S,self.S))
V3 = np.dot(V3, P) + I_RR[:,y[k-1]].reshape((self.R,1,1))*P.reshape((1,self.S,self.S))
log_alpha = self.update(y[k], log_alpha_pred)
p_xT = normalize_exp(log_alpha)
C1 = np.dot(V1, p_xT.reshape(self.S,1))
C2 = np.dot(V2, p_xT.reshape(1,self.S,1)).reshape((self.S,self.S))
C3 = np.dot(V3, p_xT.reshape(1,self.S,1)).reshape((self.R,self.S))
C3[y[-1],:] += p_xT
ll = log_sum_exp(log_alpha)
return C1, C2, C3, ll, (V1, V2, V3)
def train_EM(self, y, EPOCH=10):
LL = np.zeros(EPOCH)
for e in range(EPOCH):
C1, C2, C3, ll, V = self.forward_only_SS(y)
LL[e] = ll
p = normalize(C1 + 0.1, axis=0).reshape(self.S)
#print(p,np.size(p))
A = normalize(C2, axis=0)
#print(A)
B = normalize(C3, axis=0)
#print(B)
self.__init__(p, A, B)
# print(ll)
return LL
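# --- Usage sketch (illustrative; not part of the original module) ---
# Minimal end-to-end run of the class above: draw a random model, sample a
# sequence, then smooth and decode it. The class name `HMM` is an assumption
# here (the actual class statement appears earlier in the file), and the
# helpers used above (randgen, normalize_exp, log_sum_exp, normalize) are
# assumed to be defined there as well.
if __name__ == '__main__':
    hmm = HMM.from_random_parameters(S=3, R=5)    # assumed class name
    y, x_true = hmm.generate_sequence(T=20)       # observations, hidden states
    log_gamma = hmm.forward_backward_smoother(y)  # smoothed log-marginals, (S, T)
    x_map = hmm.viterbi(y)                        # most likely state sequence
    print(x_true)
    print(x_map)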
| mit |
SpoonITCurrency/SpoonITCoin | contrib/pyminer/pyminer.py | 766 | 6434 | #!/usr/bin/python
#
# Copyright (c) 2011 The Bitcoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import time
import json
import pprint
import hashlib
import struct
import re
import base64
import httplib
import sys
from multiprocessing import Process
ERR_SLEEP = 15
MAX_NONCE = 1000000L
settings = {}
pp = pprint.PrettyPrinter(indent=4)
class BitcoinRPC:
OBJID = 1
def __init__(self, host, port, username, password):
authpair = "%s:%s" % (username, password)
self.authhdr = "Basic %s" % (base64.b64encode(authpair))
self.conn = httplib.HTTPConnection(host, port, False, 30)
def rpc(self, method, params=None):
self.OBJID += 1
obj = { 'version' : '1.1',
'method' : method,
'id' : self.OBJID }
if params is None:
obj['params'] = []
else:
obj['params'] = params
self.conn.request('POST', '/', json.dumps(obj),
{ 'Authorization' : self.authhdr,
'Content-type' : 'application/json' })
resp = self.conn.getresponse()
if resp is None:
print "JSON-RPC: no response"
return None
body = resp.read()
resp_obj = json.loads(body)
if resp_obj is None:
print "JSON-RPC: cannot JSON-decode body"
return None
if 'error' in resp_obj and resp_obj['error'] != None:
return resp_obj['error']
if 'result' not in resp_obj:
print "JSON-RPC: no result in object"
return None
return resp_obj['result']
def getblockcount(self):
return self.rpc('getblockcount')
def getwork(self, data=None):
return self.rpc('getwork', data)
def uint32(x):
return x & 0xffffffffL
def bytereverse(x):
return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) |
(((x) >> 8) & 0x0000ff00) | ((x) >> 24) ))
def bufreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
word = struct.unpack('@I', in_buf[i:i+4])[0]
out_words.append(struct.pack('@I', bytereverse(word)))
return ''.join(out_words)
def wordreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
out_words.append(in_buf[i:i+4])
out_words.reverse()
return ''.join(out_words)
class Miner:
def __init__(self, id):
self.id = id
self.max_nonce = MAX_NONCE
def work(self, datastr, targetstr):
# decode work data hex string to binary
static_data = datastr.decode('hex')
static_data = bufreverse(static_data)
# the first 76b of 80b do not change
blk_hdr = static_data[:76]
# decode 256-bit target value
targetbin = targetstr.decode('hex')
targetbin = targetbin[::-1] # byte-swap and dword-swap
targetbin_str = targetbin.encode('hex')
target = long(targetbin_str, 16)
# pre-hash first 76b of block header
static_hash = hashlib.sha256()
static_hash.update(blk_hdr)
for nonce in xrange(self.max_nonce):
# encode 32-bit nonce value
nonce_bin = struct.pack("<I", nonce)
# hash final 4b, the nonce value
hash1_o = static_hash.copy()
hash1_o.update(nonce_bin)
hash1 = hash1_o.digest()
# sha256 hash of sha256 hash
hash_o = hashlib.sha256()
hash_o.update(hash1)
hash = hash_o.digest()
# quick test for winning solution: high 32 bits zero?
if hash[-4:] != '\0\0\0\0':
continue
# convert binary hash to 256-bit Python long
hash = bufreverse(hash)
hash = wordreverse(hash)
hash_str = hash.encode('hex')
l = long(hash_str, 16)
# proof-of-work test: hash < target
if l < target:
print time.asctime(), "PROOF-OF-WORK found: %064x" % (l,)
return (nonce + 1, nonce_bin)
else:
print time.asctime(), "PROOF-OF-WORK false positive %064x" % (l,)
# return (nonce + 1, nonce_bin)
return (nonce + 1, None)
def submit_work(self, rpc, original_data, nonce_bin):
nonce_bin = bufreverse(nonce_bin)
nonce = nonce_bin.encode('hex')
solution = original_data[:152] + nonce + original_data[160:256]
param_arr = [ solution ]
result = rpc.getwork(param_arr)
print time.asctime(), "--> Upstream RPC result:", result
def iterate(self, rpc):
work = rpc.getwork()
if work is None:
time.sleep(ERR_SLEEP)
return
if 'data' not in work or 'target' not in work:
time.sleep(ERR_SLEEP)
return
time_start = time.time()
(hashes_done, nonce_bin) = self.work(work['data'],
work['target'])
time_end = time.time()
time_diff = time_end - time_start
self.max_nonce = long(
(hashes_done * settings['scantime']) / time_diff)
if self.max_nonce > 0xfffffffaL:
self.max_nonce = 0xfffffffaL
if settings['hashmeter']:
print "HashMeter(%d): %d hashes, %.2f Khash/sec" % (
self.id, hashes_done,
(hashes_done / 1000.0) / time_diff)
if nonce_bin is not None:
self.submit_work(rpc, work['data'], nonce_bin)
def loop(self):
rpc = BitcoinRPC(settings['host'], settings['port'],
settings['rpcuser'], settings['rpcpass'])
if rpc is None:
return
while True:
self.iterate(rpc)
def miner_thread(id):
miner = Miner(id)
miner.loop()
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: pyminer.py CONFIG-FILE"
sys.exit(1)
f = open(sys.argv[1])
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 9332
if 'threads' not in settings:
settings['threads'] = 1
if 'hashmeter' not in settings:
settings['hashmeter'] = 0
if 'scantime' not in settings:
settings['scantime'] = 30L
if 'rpcuser' not in settings or 'rpcpass' not in settings:
print "Missing username and/or password in cfg file"
sys.exit(1)
settings['port'] = int(settings['port'])
settings['threads'] = int(settings['threads'])
settings['hashmeter'] = int(settings['hashmeter'])
settings['scantime'] = long(settings['scantime'])
thr_list = []
for thr_id in range(settings['threads']):
p = Process(target=miner_thread, args=(thr_id,))
p.start()
thr_list.append(p)
time.sleep(1) # stagger threads
print settings['threads'], "mining threads started"
print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port'])
try:
for thr_proc in thr_list:
thr_proc.join()
except KeyboardInterrupt:
pass
print time.asctime(), "Miner Stops - %s:%s" % (settings['host'], settings['port'])
| mit |
dandygithub/kodi | addons/plugin.audio.dandy.amdm.ru/default.py | 1 | 19076 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Writer (c) 2018, dandy
# Rev. 1.0.0
# Licence: GPL v.3: http://www.gnu.org/copyleft/gpl.html
import os
import urllib
import urllib2
import sys
import re
import socket
import json
import xbmc
import xbmcplugin
import xbmcgui
import xbmcaddon
from operator import itemgetter
import XbmcHelpers
common = XbmcHelpers
socket.setdefaulttimeout(120)
class AmDm():
def __init__(self):
self.id = 'plugin.audio.dandy.amdm.ru'
self.addon = xbmcaddon.Addon(self.id)
self.icon = self.addon.getAddonInfo('icon')
self.path = self.addon.getAddonInfo('path')
self.profile = self.addon.getAddonInfo('profile')
self.language = self.addon.getLocalizedString
self.handle = int(sys.argv[1])
self.params = sys.argv[2]
self.url = 'https://amdm.ru'
self.inext = os.path.join(self.path, 'resources/icons/next.png')
def main(self):
self.log("Addon: %s" % self.id)
self.log("Handle: %d" % self.handle)
self.log("Params: %s" % self.params)
params = common.getParameters(self.params)
mode = params['mode'] if 'mode' in params else None
url = urllib.unquote_plus(params['url']) if 'url' in params else None
page = int(params['page']) if 'page' in params else None
item = int(params['item']) if 'item' in params else None
keyword = urllib.unquote_plus(params['keyword']) if 'keyword' in params else None
tone = int(params['tone']) if 'tone' in params else 0
if page == 0:
xbmc.executebuiltin('Container.Update(%s, replace)' % sys.argv[0])
elif mode == 'show':
self.show(url, tone)
elif mode == 'items':
self.getItems(url, page, item, keyword)
elif mode == 'items2':
self.getItems2(url)
elif mode == 'subitems2':
self.getSubItems2(url)
elif mode == 'alphabet':
self.alphabet()
elif mode == 'search':
self.search()
elif mode == 'text':
self.text(url, tone)
elif mode == 'video':
self.video(url)
elif mode == 'akkords':
self.akkords(url, tone)
elif mode == 'tone':
self.tone(url, tone)
elif mode == 'empty':
self.empty()
elif mode is None:
self.menu()
def menu(self):
uri = sys.argv[0] + "?mode=search"
item = xbmcgui.ListItem("[COLOR=lightgreen]%s[/COLOR]" % self.language(1000), thumbnailImage=self.icon)
xbmcplugin.addDirectoryItem(self.handle, uri, item, True)
self.getMainItems()
uri = sys.argv[0] + "?mode=alphabet"
item = xbmcgui.ListItem("[COLOR=orange]%s[/COLOR]" % self.language(1001), thumbnailImage=self.icon)
xbmcplugin.addDirectoryItem(self.handle, uri, item, True)
xbmcplugin.setContent(self.handle, 'files')
xbmcplugin.endOfDirectory(self.handle, True)
def getMainItems(self):
response = common.fetchPage({"link": self.url})
if response["status"] == 200:
content = common.parseDOM(response["content"], "ul", attrs={"class": "sub-menu g-padding sub-menu--active"})[0]
items = common.parseDOM(content, "li")
labels = common.parseDOM(items, "a")
links = common.parseDOM(items, "a", ret="href")
for i, item in enumerate(labels):
if (i > 3):
break
uri = sys.argv[0] + "?mode=items&item=%s&url=%s" % (str(i), "https:" + links[i])
item_ = xbmcgui.ListItem(self.strip(item), iconImage=self.icon, thumbnailImage=self.icon)
xbmcplugin.addDirectoryItem(self.handle, uri, item_, True)
def alphabet(self):
response = common.fetchPage({"link": self.url})
if response["status"] == 200:
content = common.parseDOM(response["content"], "div", attrs={"class": "alphabet g-margin"})[0]
items = common.parseDOM(content, "li")
labels = common.parseDOM(items, "a")
links = common.parseDOM(items, "a", ret="href")
for i, item in enumerate(labels):
uri = sys.argv[0] + "?mode=subitems2&url=%s" % (self.url + links[i])
item_ = xbmcgui.ListItem(self.strip(item), iconImage=self.icon, thumbnailImage=self.icon)
xbmcplugin.addDirectoryItem(self.handle, uri, item_, True)
xbmcplugin.setContent(self.handle, 'files')
xbmcplugin.endOfDirectory(self.handle, True)
def getSubItems1(self, url):
response = common.fetchPage({"link": url})
if response["status"] == 200:
content = common.parseDOM(response["content"], "ul", attrs={"class": "h1__tabs"})[0]
items = common.parseDOM(content, "li")
for i, item in enumerate(items):
label = common.parseDOM(item, "a")[0] if common.parseDOM(item, "a") else common.parseDOM(item, "span")[0]
link = self.url + common.parseDOM(item, "a", ret="href")[0] if common.parseDOM(item, "a") else self.url + "/akkordi/popular/"
uri = sys.argv[0] + "?mode=items&url=%s" % (link)
item_ = xbmcgui.ListItem(self.strip(label), iconImage=self.icon, thumbnailImage=self.icon)
xbmcplugin.addDirectoryItem(self.handle, uri, item_, True)
xbmcplugin.setContent(self.handle, 'files')
xbmcplugin.endOfDirectory(self.handle, True)
def getSubItems2(self, url):
response = common.fetchPage({"link": url})
if response["status"] == 200:
content = common.parseDOM(response["content"], "table", attrs={"class": "items"})
items = common.parseDOM(content, "tr")
photo_tds = common.parseDOM(items, "td", attrs={"class": "photo"})
photos = common.parseDOM(photo_tds, "img", ret="src")
tds = common.parseDOM(items, "td", attrs={"class": "artist_name"})
labels = common.parseDOM(tds, "a")
links = common.parseDOM(tds, "a", ret="href")
for i, item in enumerate(labels):
uri = sys.argv[0] + '?mode=items2&url=%s' % ("https:" + links[i])
try:
photo = ("https:" + photos[i]).replace("33x33", "250")
except:
photo = self.icon
sub = tds[i]
numbers = common.parseDOM(items[i], "td", attrs={"class": "number"})
item_ = xbmcgui.ListItem(self.strip("[COLOR=lightgreen]%s[/COLOR]%s [COLOR=lightblue][%s][/COLOR]" % (labels[i], " - [I]" + sub.split("<br>")[-1] + "[/I]" if "<br>" in sub else "", numbers[0])), iconImage=photo, thumbnailImage=photo)
xbmcplugin.addDirectoryItem(self.handle, uri, item_, True)
xbmcplugin.setContent(self.handle, 'files')
xbmcplugin.endOfDirectory(self.handle, True)
def getSubItems3(self, url, page):
if (not page):
page = 1
response = common.fetchPage({"link": url}) if (not page) else common.fetchPage({"link": url + "page" + str(page) + "/"})
if response["status"] == 200:
content = common.parseDOM(response["content"], "table", attrs={"class": "items"})
items = common.parseDOM(content, "tr")
photo_tds = common.parseDOM(items, "td", attrs={"class": "photo"})
photos = common.parseDOM(photo_tds, "img", ret="src")
tds = common.parseDOM(items, "td", attrs={"class": "artist_name"})
labels = common.parseDOM(tds, "a")
links = common.parseDOM(tds, "a", ret="href")
for i, item in enumerate(labels):
uri = sys.argv[0] + '?mode=items2&url=%s' % ("https:" + links[i])
try:
photo = ("https:" + photos[i]).replace("33x33", "250")
except:
photo = self.icon
sub = tds[i]
numbers = common.parseDOM(items[i], "td", attrs={"class": "number"})
item_ = xbmcgui.ListItem(self.strip("[COLOR=lightgreen]%s[/COLOR]%s [COLOR=blue][%s][/COLOR]" % (labels[i], " - [I]" + sub.split("<br>")[-1] + "[/I]" if "<br>" in sub else "", numbers[0])), iconImage=photo, thumbnailImage=photo)
xbmcplugin.addDirectoryItem(self.handle, uri, item_, True)
nav = common.parseDOM(response["content"], "ul", attrs={"class": "nav-pages"})
if page and nav:
uri = sys.argv[0] + "?mode=items&item=3&url=%s&page=%s"%(url, str(page + 1))
item = xbmcgui.ListItem("[COLOR=orange]%s[/COLOR]" % (self.language(2000)%(str(page + 1))), thumbnailImage=self.inext)
xbmcplugin.addDirectoryItem(self.handle, uri, item, True)
xbmcplugin.setContent(self.handle, 'files')
xbmcplugin.endOfDirectory(self.handle, True)
def getItems(self, url, page, item=0, keyword=None):
xbmc.log("item=" + repr(item))
if item == 1:
self.getSubItems1(url)
return
if item == 2:
self.getSubItems2(url)
return
if item == 3:
self.getSubItems3(url, page)
return
xbmc.log("page=" + repr(page))
if (not page):
page = 1
params = {}
if keyword:
params = { "q": keyword }
if keyword:
response = common.fetchPage({"link": url + "page" + str(page) + "/?" + urllib.urlencode(params)})
else:
response = common.fetchPage({"link": url + "page" + str(page) + "/" })
if response["status"] == 200:
content = common.parseDOM(response["content"], "table", attrs={"class": "items"})
items = common.parseDOM(content, "tr")
photo_tds = common.parseDOM(items, "td", attrs={"class": "photo"})
photos = common.parseDOM(photo_tds, "img", ret="src")
tds = common.parseDOM(items, "td", attrs={"class": "artist_name"})
labels = common.parseDOM(tds, "a")
links = common.parseDOM(tds, "a", ret="href")
label = ""
for i, item in enumerate(labels):
if (i % 2) == 1:
uri = sys.argv[0] + '?mode=show&url=%s' % ("https:" + links[i])
try:
photo = (self.url + photos[(i-1)/2]).replace("33x33", "250")
except:
photo = self.icon
item_ = xbmcgui.ListItem(self.strip("[COLOR=lightgreen]%s[/COLOR]" % label + " - " + labels[i]), iconImage=photo, thumbnailImage=photo)
xbmcplugin.addDirectoryItem(self.handle, uri, item_, True)
else:
label = labels[i]
nav = common.parseDOM(response["content"], "ul", attrs={"class": "nav-pages"})
if page and nav:
uri = sys.argv[0] + "?mode=items&url=%s&page=%s"%(url, str(page + 1))
if keyword:
uri = uri + "&keyword=" + urllib.quote_plus(keyword)
item = xbmcgui.ListItem("[COLOR=orange]%s[/COLOR]" % (self.language(2000)%(str(page + 1))), thumbnailImage=self.inext)
xbmcplugin.addDirectoryItem(self.handle, uri, item, True)
xbmcplugin.setContent(self.handle, 'files')
xbmcplugin.endOfDirectory(self.handle, True)
def getItems2(self, url):
response = common.fetchPage({"link": url})
if response["status"] == 200:
photo_div = common.parseDOM(response["content"], "div", attrs={"class": "artist-profile__photo debug1"})[0]
photo = "https:" + common.parseDOM(photo_div, "img", ret="src")[0]
content = common.parseDOM(response["content"], "table", attrs={"id": "tablesort"})
items = common.parseDOM(content, "tr")
labels = common.parseDOM(items, "a")
links = common.parseDOM(items, "a", ret="href")
for i, item in enumerate(items):
uri = sys.argv[0] + '?mode=show&url=%s' % ("https:" + links[i])
item_ = xbmcgui.ListItem(self.strip("%s" % labels[i]), iconImage=photo, thumbnailImage=photo)
xbmcplugin.addDirectoryItem(self.handle, uri, item_, True)
xbmcplugin.setContent(self.handle, 'files')
xbmcplugin.endOfDirectory(self.handle, True)
def showText(self, heading, text):
id = 10147
xbmc.executebuiltin('ActivateWindow(%d)' % id)
xbmc.sleep(500)
win = xbmcgui.Window(id)
retry = 50
while (retry > 0):
try:
xbmc.sleep(10)
retry -= 1
win.getControl(1).setLabel(heading)
win.getControl(5).setText(text)
return
except:
pass
def show(self, url, tone = 0):
uri = sys.argv[0] + "?mode=text&tone=%s&url=%s" % (str(tone), url)
item = xbmcgui.ListItem("%s" % "[COLOR=lightgreen]" + self.language(3000) + "[/COLOR]", thumbnailImage=self.icon)
xbmcplugin.addDirectoryItem(self.handle, uri, item, True)
uri = sys.argv[0] + "?mode=video&url=%s" % (url)
item = xbmcgui.ListItem("%s" % self.language(3001), thumbnailImage=self.icon)
item.setInfo(type='Video', infoLabels={})
item.setProperty('IsPlayable', 'true')
xbmcplugin.addDirectoryItem(self.handle, uri, item, False)
uri = sys.argv[0] + "?mode=akkords&url=%s&tone=%s" % (url, str(tone))
item = xbmcgui.ListItem("%s" % self.language(3002), thumbnailImage=self.icon)
xbmcplugin.addDirectoryItem(self.handle, uri, item, True)
uri = sys.argv[0] + "?mode=tone&url=%s&tone=%s" % (url, str(tone))
item = xbmcgui.ListItem("%s - [COLOR=lightblue]%s[/COLOR]" % (self.language(3003), tone), thumbnailImage=self.icon)
xbmcplugin.addDirectoryItem(self.handle, uri, item, True)
xbmcplugin.setContent(self.handle, 'files')
xbmcplugin.endOfDirectory(self.handle, True)
def text(self, url, tone=0):
response = common.fetchPage({"link": url})
if response["status"] == 200:
content = common.parseDOM(response["content"], "div", attrs={"class": "b-podbor "})[0]
label = common.parseDOM(content, "span", attrs={"itemprop": "byArtist"})[0]
label += " - " + common.parseDOM(content, "span", attrs={"itemprop": "name"})[0]
comment = common.parseDOM(response["content"], "pre", attrs={"class": "b-podbor__comment"})
if tone != 0:
data = self.getToneData(url, tone)
jdata = json.loads(data)
text = jdata["song_text"]
else:
text = common.parseDOM(content, "pre", attrs={"itemprop": "chordsBlock"})[0]
text = text.replace("<b>", "[COLOR orange]")
text = text.replace("</b>", "[/COLOR]")
if comment:
text = "[I]" + comment[0] + "[/I]\n\n" + text
self.showText(label, text)
def video(self, url):
response = common.fetchPage({"link": url})
if response["status"] == 200:
try:
content = common.parseDOM(response["content"], "div", attrs={"class": "b-video"})[0]
data = common.parseDOM(content, "iframe", ret="src")[0]
videoId = data.split("/")[-1]
link = "plugin://plugin.video.youtube/play/?video_id=" + videoId
item = xbmcgui.ListItem(path = link)
xbmcplugin.setResolvedUrl(self.handle, True, item)
except:
self.showWarningMessage(self.language(4000))
def getToneData(self, url, tone):
data = None
response = common.fetchPage({"link": url})
if response["status"] == 200:
song_id = common.parseDOM(response["content"], "input", attrs={"name": "song_id"}, ret="value")[0]
link = self.url + "/json/song/transpon/"
values = { "song_id": song_id, "tone": tone }
response = common.fetchPage({"link": link, "post_data": values})
if response["status"] == 200:
data = response["content"]
return data
def empty(self):
return False
def akkords(self, url, tone=0):
data = self.getToneData(url, tone)
jdata = json.loads(data)
chords = jdata["song_chords"]
text = jdata["song_text"]
for chord in chords:
try:
chord_ = chords[chord]
except:
chord_ = chord
image = self.url + "/images/chords/" + chord_.replace('+', 'p').replace('-', 'z').replace('#', 'w').replace('/', 's') + "_0.gif"
uri = sys.argv[0] + "?mode=empty"
item = xbmcgui.ListItem(chord_, thumbnailImage=image)
xbmcplugin.addDirectoryItem(self.handle, uri, item, False)
xbmcplugin.setContent(self.handle, 'movies')
xbmcplugin.endOfDirectory(self.handle, True)
xbmc.executebuiltin("Container.SetViewMode(0)")
for i in range(1, 2):
xbmc.executebuiltin("Container.NextViewMode")
def tone(self, url, tone=0):
for tone_ in range(13):
uri = sys.argv[0] + "?mode=show&url=%s&tone=%s" % (url, str(tone_ - 6))
item = xbmcgui.ListItem("%s" % str(tone_ - 6), thumbnailImage=self.icon)
xbmcplugin.addDirectoryItem(self.handle, uri, item, True)
xbmcplugin.setContent(self.handle, 'files')
xbmcplugin.endOfDirectory(self.handle, True)
def get_user_input(self):
kbd = xbmc.Keyboard()
kbd.setDefault('')
kbd.setHeading(self.language(2000))
kbd.doModal()
keyword = None
if kbd.isConfirmed():
keyword = kbd.getText()
return keyword
def search(self):
keyword = self.get_user_input()
if (not keyword) or (keyword == ""):
return
self.getItems(self.url + "/search/", 1, 0, keyword)
xbmcplugin.setContent(self.handle, 'files')
xbmcplugin.endOfDirectory(self.handle, True)
# *** Add-on helpers
def log(self, message):
print "### %s: %s" % (self.id, message)
def error(self, message):
print "%s ERROR: %s" % (self.id, message)
def showErrorMessage(self, msg):
print msg
xbmc.executebuiltin("XBMC.Notification(%s, %s, %s)" % ("ERROR", msg, str(5 * 1000)))
def showWarningMessage(self, msg):
print msg
xbmc.executebuiltin("XBMC.Notification(%s, %s, %s)" % ("WARNING", msg, str(5 * 1000)))
def strip(self, string):
return common.stripTags(string)
def encode(self, string):
return string.decode('cp1251').encode('utf-8')
def decode(self, string):
return string.decode('utf-8').encode('cp1251')
AmDm = AmDm()
AmDm.main()
| gpl-3.0 |
ryanGT/sympy | sympy/core/relational.py | 2 | 4080 |
from basic import Basic, C
from sympify import _sympify
from numbers import Number
def Rel(a, b, op):
"""
A handy wrapper around the Relational class.
Rel(a,b, op)
Example:
>>> from sympy import *
>>> x,y = symbols('xy')
>>> Rel(y, x+x**2, '==')
y == x + x**2
"""
return Relational(a,b,op)
def Eq(a, b=0):
"""
A handy wrapper around the Relational class.
Eq(a,b)
Example:
>>> from sympy import *
>>> x,y = symbols('xy')
>>> Eq(y, x+x**2)
y == x + x**2
"""
return Relational(a,b,'==')
def Ne(a, b):
"""
A handy wrapper around the Relational class.
Ne(a,b)
Example:
>>> from sympy import *
>>> x,y = symbols('xy')
>>> Ne(y, x+x**2)
y != x + x**2
"""
return Relational(a,b,'!=')
def Lt(a, b):
"""
A handy wrapper around the Relational class.
Lt(a,b)
Example:
>>> from sympy import *
>>> x,y = symbols('xy')
>>> Lt(y, x+x**2)
y < x + x**2
"""
return Relational(a,b,'<')
def Le(a, b):
"""
A handy wrapper around the Relational class.
Le(a,b)
Example:
>>> from sympy import *
>>> x,y = symbols('xy')
>>> Le(y, x+x**2)
y <= x + x**2
"""
return Relational(a,b,'<=')
def Gt(a, b):
"""
A handy wrapper around the Relational class.
Gt(a,b)
Example:
>>> from sympy import *
>>> x,y = symbols('xy')
>>> Gt(y, x+x**2)
x + x**2 < y
"""
return Relational(a,b,'>')
def Ge(a, b):
"""
A handy wrapper around the Relational class.
Ge(a,b)
Example:
>>> from sympy import *
>>> x,y = symbols('xy')
>>> Ge(y, x+x**2)
x + x**2 <= y
"""
return Relational(a,b,'>=')
class Relational(Basic):
__slots__ = []
@staticmethod
def get_relational_class(rop):
if rop is None or rop in ['==','eq']: return Equality, False
if rop in ['!=','<>','ne']: return Unequality, False
if rop in ['<','lt']: return StrictInequality, False
if rop in ['>','gt']: return StrictInequality, True
if rop in ['<=','le']: return Inequality, False
if rop in ['>=','ge']: return Inequality, True
raise ValueError("Invalid relational operator symbol: %r" % (rop))
def __new__(cls, lhs, rhs, rop=None, **assumptions):
lhs = _sympify(lhs)
rhs = _sympify(rhs)
if cls is not Relational:
rop_cls = cls
else:
rop_cls, swap = Relational.get_relational_class(rop)
if swap: lhs, rhs = rhs, lhs
obj = Basic.__new__(rop_cls, lhs, rhs, **assumptions)
return obj
@property
def lhs(self):
return self._args[0]
@property
def rhs(self):
return self._args[1]
def _eval_subs(self, old, new):
return self.__class__(self.lhs._eval_subs(old, new), self.rhs._eval_subs(old, new))
class Equality(Relational):
rel_op = '=='
__slots__ = []
def __nonzero__(self):
return self.lhs.compare(self.rhs)==0
class Unequality(Relational):
rel_op = '!='
__slots__ = []
def __nonzero__(self):
return self.lhs.compare(self.rhs)!=0
class StrictInequality(Relational):
rel_op = '<'
__slots__ = []
def __nonzero__(self):
if self.lhs.is_comparable and self.rhs.is_comparable:
if self.lhs.is_Number and self.rhs.is_Number:
return self.lhs < self.rhs
return self.lhs.evalf()<self.rhs.evalf()
return self.lhs.compare(self.rhs)==-1
class Inequality(Relational):
rel_op = '<='
__slots__ = []
def __nonzero__(self):
if self.lhs.is_comparable and self.rhs.is_comparable:
if self.lhs.is_Number and self.rhs.is_Number:
return self.lhs <= self.rhs
return self.lhs.evalf()<=self.rhs.evalf()
return self.lhs.compare(self.rhs)<=0
# /cyclic/
import basic as _
_.Equality = Equality
_.Unequality = Unequality
_.Inequality = Inequality
_.StrictInequality = StrictInequality
del _
| bsd-3-clause |
fafaman/django | tests/prefetch_related/test_uuid.py | 291 | 4347 | from __future__ import unicode_literals
from django.test import TestCase
from .models import Flea, House, Person, Pet, Room
class UUIDPrefetchRelated(TestCase):
def test_prefetch_related_from_uuid_model(self):
Pet.objects.create(name='Fifi').people.add(
Person.objects.create(name='Ellen'),
Person.objects.create(name='George'),
)
with self.assertNumQueries(2):
pet = Pet.objects.prefetch_related('people').get(name='Fifi')
with self.assertNumQueries(0):
self.assertEqual(2, len(pet.people.all()))
def test_prefetch_related_to_uuid_model(self):
Person.objects.create(name='Bella').pets.add(
Pet.objects.create(name='Socks'),
Pet.objects.create(name='Coffee'),
)
with self.assertNumQueries(2):
person = Person.objects.prefetch_related('pets').get(name='Bella')
with self.assertNumQueries(0):
self.assertEqual(2, len(person.pets.all()))
def test_prefetch_related_from_uuid_model_to_uuid_model(self):
fleas = [Flea.objects.create() for i in range(3)]
Pet.objects.create(name='Fifi').fleas_hosted.add(*fleas)
Pet.objects.create(name='Bobo').fleas_hosted.add(*fleas)
with self.assertNumQueries(2):
pet = Pet.objects.prefetch_related('fleas_hosted').get(name='Fifi')
with self.assertNumQueries(0):
self.assertEqual(3, len(pet.fleas_hosted.all()))
with self.assertNumQueries(2):
flea = Flea.objects.prefetch_related('pets_visited').get(pk=fleas[0].pk)
with self.assertNumQueries(0):
self.assertEqual(2, len(flea.pets_visited.all()))
class UUIDPrefetchRelatedLookups(TestCase):
@classmethod
def setUpTestData(cls):
house = House.objects.create(name='Redwood', address='Arcata')
room = Room.objects.create(name='Racoon', house=house)
fleas = [Flea.objects.create(current_room=room) for i in range(3)]
pet = Pet.objects.create(name='Spooky')
pet.fleas_hosted.add(*fleas)
person = Person.objects.create(name='Bob')
person.houses.add(house)
person.pets.add(pet)
person.fleas_hosted.add(*fleas)
def test_from_uuid_pk_lookup_uuid_pk_integer_pk(self):
# From uuid-pk model, prefetch <uuid-pk model>.<integer-pk model>:
with self.assertNumQueries(4):
spooky = Pet.objects.prefetch_related('fleas_hosted__current_room__house').get(name='Spooky')
with self.assertNumQueries(0):
self.assertEqual('Racoon', spooky.fleas_hosted.all()[0].current_room.name)
def test_from_uuid_pk_lookup_integer_pk2_uuid_pk2(self):
# From uuid-pk model, prefetch <integer-pk model>.<integer-pk model>.<uuid-pk model>.<uuid-pk model>:
with self.assertNumQueries(5):
spooky = Pet.objects.prefetch_related('people__houses__rooms__fleas').get(name='Spooky')
with self.assertNumQueries(0):
self.assertEqual(3, len(spooky.people.all()[0].houses.all()[0].rooms.all()[0].fleas.all()))
def test_from_integer_pk_lookup_uuid_pk_integer_pk(self):
# From integer-pk model, prefetch <uuid-pk model>.<integer-pk model>:
with self.assertNumQueries(3):
racoon = Room.objects.prefetch_related('fleas__people_visited').get(name='Racoon')
with self.assertNumQueries(0):
self.assertEqual('Bob', racoon.fleas.all()[0].people_visited.all()[0].name)
def test_from_integer_pk_lookup_integer_pk_uuid_pk(self):
# From integer-pk model, prefetch <integer-pk model>.<uuid-pk model>:
with self.assertNumQueries(3):
redwood = House.objects.prefetch_related('rooms__fleas').get(name='Redwood')
with self.assertNumQueries(0):
self.assertEqual(3, len(redwood.rooms.all()[0].fleas.all()))
def test_from_integer_pk_lookup_integer_pk_uuid_pk_uuid_pk(self):
# From integer-pk model, prefetch <integer-pk model>.<uuid-pk model>.<uuid-pk model>:
with self.assertNumQueries(4):
redwood = House.objects.prefetch_related('rooms__fleas__pets_visited').get(name='Redwood')
with self.assertNumQueries(0):
self.assertEqual('Spooky', redwood.rooms.all()[0].fleas.all()[0].pets_visited.all()[0].name)
| bsd-3-clause |
jokey2k/sentry | src/sentry/migrations/0003_auto__add_field_message_group__del_field_groupedmessage_server_name.py | 36 | 4187 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Message.group'
db.add_column('sentry_message', 'group', self.gf('sentry.db.models.fields.FlexibleForeignKey')(blank=True, related_name='message_set', null=True, to=orm['sentry.GroupedMessage']), keep_default=False)
# Deleting field 'GroupedMessage.server_name'
db.delete_column('sentry_groupedmessage', 'server_name')
def backwards(self, orm):
# Deleting field 'Message.group'
db.delete_column('sentry_message', 'group_id')
# Adding field 'GroupedMessage.server_name'
db.add_column('sentry_groupedmessage', 'server_name', self.gf('django.db.models.fields.CharField')(default='', max_length=128, db_index=True), keep_default=False)
models = {
'sentry.groupedmessage': {
'Meta': {'unique_together': "(('logger', 'view', 'checksum'),)", 'object_name': 'GroupedMessage'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'class_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'traceback': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'view': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'sentry.message': {
'Meta': {'object_name': 'Message'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'class_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'message_set'", 'null': 'True', 'to': "orm['sentry.GroupedMessage']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'traceback': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'view': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['sentry']
| bsd-3-clause |
sensysnetworks/uClinux | user/python/Lib/lib-tk/Canvas.py | 4 | 6473 | # This module exports classes for the various canvas item types
# NOTE: This module was an experiment and is now obsolete.
# It's best to use the Tkinter.Canvas class directly.
from Tkinter import Canvas, _cnfmerge, _flatten
class CanvasItem:
def __init__(self, canvas, itemType, *args, **kw):
self.canvas = canvas
self.id = canvas._create(itemType, args, kw)
if not hasattr(canvas, 'items'):
canvas.items = {}
canvas.items[self.id] = self
def __str__(self):
return str(self.id)
def __repr__(self):
return '<%s, id=%d>' % (self.__class__.__name__, self.id)
def delete(self):
del self.canvas.items[self.id]
self.canvas.delete(self.id)
def __getitem__(self, key):
v = self.canvas.tk.split(self.canvas.tk.call(
self.canvas._w, 'itemconfigure',
self.id, '-' + key))
return v[4]
cget = __getitem__
def __setitem__(self, key, value):
self.canvas.itemconfig(self.id, {key: value})
def keys(self):
if not hasattr(self, '_keys'):
self._keys = map(lambda x, tk=self.canvas.tk:
tk.splitlist(x)[0][1:],
self.canvas.tk.splitlist(
self.canvas._do(
'itemconfigure',
(self.id,))))
return self._keys
def has_key(self, key):
return key in self.keys()
def addtag(self, tag, option='withtag'):
self.canvas.addtag(tag, option, self.id)
def bbox(self):
x1, y1, x2, y2 = self.canvas.bbox(self.id)
return (x1, y1), (x2, y2)
def bind(self, sequence=None, command=None, add=None):
return self.canvas.tag_bind(self.id, sequence, command, add)
def unbind(self, sequence, funcid=None):
self.canvas.tag_unbind(self.id, sequence, funcid)
def config(self, cnf={}, **kw):
return self.canvas.itemconfig(self.id, _cnfmerge((cnf, kw)))
def coords(self, pts = ()):
flat = ()
for x, y in pts: flat = flat + (x, y)
return apply(self.canvas.coords, (self.id,) + flat)
def dchars(self, first, last=None):
self.canvas.dchars(self.id, first, last)
def dtag(self, ttd):
self.canvas.dtag(self.id, ttd)
def focus(self):
self.canvas.focus(self.id)
def gettags(self):
return self.canvas.gettags(self.id)
def icursor(self, index):
self.canvas.icursor(self.id, index)
def index(self, index):
return self.canvas.index(self.id, index)
def insert(self, beforethis, string):
self.canvas.insert(self.id, beforethis, string)
def lower(self, belowthis=None):
self.canvas.tag_lower(self.id, belowthis)
def move(self, xamount, yamount):
self.canvas.move(self.id, xamount, yamount)
def tkraise(self, abovethis=None):
self.canvas.tag_raise(self.id, abovethis)
raise_ = tkraise # BW compat
def scale(self, xorigin, yorigin, xscale, yscale):
self.canvas.scale(self.id, xorigin, yorigin, xscale, yscale)
def type(self):
return self.canvas.type(self.id)
class Arc(CanvasItem):
def __init__(self, canvas, *args, **kw):
apply(CanvasItem.__init__, (self, canvas, 'arc') + args, kw)
class Bitmap(CanvasItem):
def __init__(self, canvas, *args, **kw):
apply(CanvasItem.__init__, (self, canvas, 'bitmap') + args, kw)
class ImageItem(CanvasItem):
def __init__(self, canvas, *args, **kw):
apply(CanvasItem.__init__, (self, canvas, 'image') + args, kw)
class Line(CanvasItem):
def __init__(self, canvas, *args, **kw):
apply(CanvasItem.__init__, (self, canvas, 'line') + args, kw)
class Oval(CanvasItem):
def __init__(self, canvas, *args, **kw):
apply(CanvasItem.__init__, (self, canvas, 'oval') + args, kw)
class Polygon(CanvasItem):
def __init__(self, canvas, *args, **kw):
apply(CanvasItem.__init__, (self, canvas, 'polygon') + args,kw)
class Rectangle(CanvasItem):
def __init__(self, canvas, *args, **kw):
apply(CanvasItem.__init__, (self, canvas, 'rectangle')+args,kw)
# XXX "Text" is taken by the Text widget...
class CanvasText(CanvasItem):
def __init__(self, canvas, *args, **kw):
apply(CanvasItem.__init__, (self, canvas, 'text') + args, kw)
class Window(CanvasItem):
def __init__(self, canvas, *args, **kw):
apply(CanvasItem.__init__, (self, canvas, 'window') + args, kw)
class Group:
def __init__(self, canvas, tag=None):
if not tag:
tag = 'Group%d' % id(self)
self.tag = self.id = tag
self.canvas = canvas
self.canvas.dtag(self.tag)
def str(self):
return self.tag
__str__ = str
def _do(self, cmd, *args):
return self.canvas._do(cmd, (self.tag,) + _flatten(args))
def addtag_above(self, tagOrId):
self._do('addtag', 'above', tagOrId)
def addtag_all(self):
self._do('addtag', 'all')
def addtag_below(self, tagOrId):
self._do('addtag', 'below', tagOrId)
def addtag_closest(self, x, y, halo=None, start=None):
self._do('addtag', 'closest', x, y, halo, start)
def addtag_enclosed(self, x1, y1, x2, y2):
self._do('addtag', 'enclosed', x1, y1, x2, y2)
def addtag_overlapping(self, x1, y1, x2, y2):
self._do('addtag', 'overlapping', x1, y1, x2, y2)
def addtag_withtag(self, tagOrId):
self._do('addtag', 'withtag', tagOrId)
def bbox(self):
return self.canvas._getints(self._do('bbox'))
def bind(self, sequence=None, command=None, add=None):
return self.canvas.tag_bind(self.id, sequence, command, add)
def unbind(self, sequence, funcid=None):
self.canvas.tag_unbind(self.id, sequence, funcid)
def coords(self, *pts):
return self._do('coords', pts)
def dchars(self, first, last=None):
self._do('dchars', first, last)
def delete(self):
self._do('delete')
def dtag(self, tagToDelete=None):
self._do('dtag', tagToDelete)
def focus(self):
self._do('focus')
def gettags(self):
return self.canvas.tk.splitlist(self._do('gettags', self.tag))
def icursor(self, index):
return self._do('icursor', index)
def index(self, index):
return self.canvas.tk.getint(self._do('index', index))
def insert(self, beforeThis, string):
self._do('insert', beforeThis, string)
def config(self, cnf={}, **kw):
return self.canvas.itemconfigure(self.tag, _cnfmerge((cnf,kw)))
def lower(self, belowThis=None):
self._do('tag_lower', belowThis)
def move(self, xAmount, yAmount):
self._do('move', xAmount, yAmount)
def tkraise(self, aboveThis=None):
self._do('tag_raise', aboveThis)
lift = tkraise
def scale(self, xOrigin, yOrigin, xScale, yScale):
self._do('scale', xOrigin, yOrigin, xScale, yScale)
def select_adjust(self, index):
self.canvas._do('select', ('adjust', self.tag, index))
def select_from(self, index):
self.canvas._do('select', ('from', self.tag, index))
def select_to(self, index):
self.canvas._do('select', ('to', self.tag, index))
def type(self):
return self._do('type')
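# --- Usage sketch (illustrative; not part of the original module) ---
# Minimal example of the item wrappers above on a plain Tkinter canvas.
# Positional arguments and keyword options are passed straight through to
# Canvas._create, so each wrapper accepts the same options as the matching
# canvas.create_* call (Python 2 / old Tkinter, matching the module above).
if __name__ == '__main__':
    from Tkinter import Tk
    root = Tk()
    canvas = Canvas(root, width=200, height=200)
    canvas.pack()
    ball = Oval(canvas, 20, 20, 80, 80, fill='red')
    label = CanvasText(canvas, 100, 160, text='hello')
    ball.move(30, 10)        # shift the oval
    ball['fill'] = 'blue'    # itemconfigure via __setitem__
    root.mainloop()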
| gpl-2.0 |
ATIX-AG/ansible | lib/ansible/modules/network/vyos/vyos_logging.py | 38 | 7851 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
#
# This file is part of Ansible by Red Hat
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: vyos_logging
version_added: "2.4"
author: "Trishna Guha (@trishnaguha)"
short_description: Manage logging on network devices
description:
- This module provides declarative management of logging
on Vyatta Vyos devices.
notes:
- Tested against VYOS 1.1.7
options:
dest:
description:
- Destination of the logs.
choices: ['console', 'file', 'global', 'host', 'user']
name:
description:
- If value of C(dest) is I(file) it indicates file-name,
for I(user) it indicates username and for I(host) indicates
the host name to be notified.
facility:
description:
- Set logging facility.
level:
description:
- Set logging severity levels.
aggregate:
description: List of logging definitions.
state:
description:
- State of the logging configuration.
default: present
choices: ['present', 'absent']
extends_documentation_fragment: vyos
"""
EXAMPLES = """
- name: configure console logging
vyos_logging:
dest: console
facility: all
level: crit
- name: remove console logging configuration
vyos_logging:
dest: console
state: absent
- name: configure file logging
vyos_logging:
dest: file
name: test
facility: local3
level: err
- name: Add logging aggregate
vyos_logging:
aggregate:
- { dest: file, name: test1, facility: all, level: info }
- { dest: file, name: test2, facility: news, level: debug }
state: present
- name: Remove logging aggregate
vyos_logging:
aggregate:
- { dest: console, facility: all, level: info }
- { dest: console, facility: daemon, level: warning }
- { dest: file, name: test2, facility: news, level: debug }
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- set system syslog global facility all level notice
"""
import re
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.utils import remove_default_spec
from ansible.module_utils.network.vyos.vyos import get_config, load_config
from ansible.module_utils.network.vyos.vyos import vyos_argument_spec
def spec_to_commands(updates, module):
commands = list()
want, have = updates
for w in want:
dest = w['dest']
name = w['name']
facility = w['facility']
level = w['level']
state = w['state']
del w['state']
if state == 'absent' and w in have:
if w['name']:
commands.append('delete system syslog {0} {1} facility {2} level {3}'.format(
dest, name, facility, level))
else:
commands.append('delete system syslog {0} facility {1} level {2}'.format(
dest, facility, level))
elif state == 'present' and w not in have:
if w['name']:
commands.append('set system syslog {0} {1} facility {2} level {3}'.format(
dest, name, facility, level))
else:
commands.append('set system syslog {0} facility {1} level {2}'.format(
dest, facility, level))
return commands
def config_to_dict(module):
data = get_config(module)
obj = []
for line in data.split('\n'):
if line.startswith('set system syslog'):
match = re.search(r'set system syslog (\S+)', line, re.M)
dest = match.group(1)
if dest == 'host':
match = re.search(r'host (\S+)', line, re.M)
name = match.group(1)
elif dest == 'file':
match = re.search(r'file (\S+)', line, re.M)
name = match.group(1)
elif dest == 'user':
match = re.search(r'user (\S+)', line, re.M)
name = match.group(1)
else:
name = None
if 'facility' in line:
match = re.search(r'facility (\S+)', line, re.M)
facility = match.group(1)
if 'level' in line:
match = re.search(r'level (\S+)', line, re.M)
level = match.group(1).strip("'")
obj.append({'dest': dest,
'name': name,
'facility': facility,
'level': level})
return obj
def map_params_to_obj(module, required_if=None):
obj = []
aggregate = module.params.get('aggregate')
if aggregate:
for item in aggregate:
for key in item:
if item.get(key) is None:
item[key] = module.params[key]
module._check_required_if(required_if, item)
obj.append(item.copy())
else:
if module.params['dest'] not in ('host', 'file', 'user'):
module.params['name'] = None
obj.append({
'dest': module.params['dest'],
'name': module.params['name'],
'facility': module.params['facility'],
'level': module.params['level'],
'state': module.params['state']
})
return obj
def main():
""" main entry point for module execution
"""
element_spec = dict(
dest=dict(type='str', choices=['console', 'file', 'global', 'host', 'user']),
name=dict(type='str'),
facility=dict(type='str'),
level=dict(type='str'),
state=dict(default='present', choices=['present', 'absent']),
)
aggregate_spec = deepcopy(element_spec)
# remove default in aggregate spec, to handle common arguments
remove_default_spec(aggregate_spec)
argument_spec = dict(
aggregate=dict(type='list', elements='dict', options=aggregate_spec),
)
argument_spec.update(element_spec)
argument_spec.update(vyos_argument_spec)
required_if = [('dest', 'host', ['name', 'facility', 'level']),
('dest', 'file', ['name', 'facility', 'level']),
('dest', 'user', ['name', 'facility', 'level']),
('dest', 'console', ['facility', 'level']),
('dest', 'global', ['facility', 'level'])]
module = AnsibleModule(argument_spec=argument_spec,
required_if=required_if,
supports_check_mode=True)
warnings = list()
result = {'changed': False}
if warnings:
result['warnings'] = warnings
want = map_params_to_obj(module, required_if=required_if)
have = config_to_dict(module)
commands = spec_to_commands((want, have), module)
result['commands'] = commands
if commands:
commit = not module.check_mode
load_config(module, commands, commit=commit)
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
yk5/beam | sdks/python/apache_beam/utils/windowed_value_test.py | 16 | 2446 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the windowed_value."""
import copy
import pickle
import unittest
from apache_beam.utils import windowed_value
from apache_beam.utils.timestamp import Timestamp
class WindowedValueTest(unittest.TestCase):
def test_timestamps(self):
wv = windowed_value.WindowedValue(None, 3, ())
self.assertEqual(wv.timestamp, Timestamp.of(3))
self.assertTrue(wv.timestamp is wv.timestamp)
self.assertEqual(windowed_value.WindowedValue(None, -2.5, ()).timestamp,
Timestamp.of(-2.5))
def test_with_value(self):
wv = windowed_value.WindowedValue(1, 3, ())
self.assertEqual(wv.with_value(10), windowed_value.WindowedValue(10, 3, ()))
def test_equality(self):
self.assertEqual(
windowed_value.WindowedValue(1, 3, ()),
windowed_value.WindowedValue(1, 3, ()))
self.assertNotEqual(
windowed_value.WindowedValue(1, 3, ()),
windowed_value.WindowedValue(100, 3, ()))
self.assertNotEqual(
windowed_value.WindowedValue(1, 3, ()),
windowed_value.WindowedValue(1, 300, ()))
self.assertNotEqual(
windowed_value.WindowedValue(1, 3, ()),
windowed_value.WindowedValue(1, 300, ((),)))
self.assertNotEqual(
windowed_value.WindowedValue(1, 3, ()),
object())
def test_hash(self):
wv = windowed_value.WindowedValue(1, 3, ())
wv_copy = copy.copy(wv)
self.assertFalse(wv is wv_copy)
self.assertEqual({wv: 100}.get(wv_copy), 100)
def test_pickle(self):
wv = windowed_value.WindowedValue(1, 3, ())
self.assertTrue(pickle.loads(pickle.dumps(wv)) == wv)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
UnbDroid/robomagellan | Codigos/Raspberry/desenvolvimentoRos/devel/lib/python2.7/dist-packages/actionlib/msg/_TestRequestFeedback.py | 1 | 2926 | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from actionlib/TestRequestFeedback.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class TestRequestFeedback(genpy.Message):
_md5sum = "d41d8cd98f00b204e9800998ecf8427e"
_type = "actionlib/TestRequestFeedback"
_has_header = False #flag to mark the presence of a Header object
_full_text = """# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
"""
__slots__ = []
_slot_types = []
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(TestRequestFeedback, self).__init__(*args, **kwds)
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
pass
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
pass
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
| gpl-3.0 |
daviskirk/climatecontrol | climatecontrol/processors.py | 1 | 7460 | """Fragment processors."""
import glob
import logging
import os
from typing import Any, Callable, Iterable, Iterator, Mapping, Sequence, Tuple, Type
from climatecontrol.constants import REMOVED
from climatecontrol.file_loaders import (
FileLoader,
NoCompatibleLoaderFoundError,
iter_load,
load_from_filepath,
)
from climatecontrol.fragment import Fragment, FragmentPath
from climatecontrol.utils import parse_as_json_if_possible
logger = logging.getLogger(__name__)
def find_suffix(fragment: Fragment, suffix: str) -> Iterator[Fragment]:
value = fragment.value
if isinstance(value, Mapping):
items: Iterable[tuple] = value.items()
elif isinstance(value, Sequence) and not isinstance(value, str):
items = enumerate(value)
else:
return
for k, v in items:
new = fragment.clone(value=v, path=list(fragment.path) + [k])
if isinstance(k, str) and k.endswith(suffix):
yield new
else:
yield from find_suffix(new, suffix)
def replace_from_pattern(
fragment: Fragment,
postfix_trigger: str,
transform_value: Callable[[Any, FragmentPath], Any],
expected_exceptions: Tuple[Type[Exception], ...] = (),
):
"""Replace settings values using a given value transformation.
Args:
fragment: original fragment to search
postfix_trigger: String at end of key that should trigger the transformation
transform_value: Function to use to transform the value. The function should take two arguments:
* value: the value to transform
* path: the fragment path at which the value was found.
        expected_exceptions: Tuple of exceptions to ignore if they are
raised. In this case the original key and it's value that
triggered the transformation is removed, and is not replaced
with a new value.
Yields:
Additional fragments to patch the original fragment.
"""
for leaf in find_suffix(fragment, postfix_trigger):
path = leaf.path
value = leaf.value
if not path or value == REMOVED:
continue
key = path[-1]
yield leaf.clone(value=REMOVED, path=path)
try:
# This allows "transform_value" to be a generator function as well.
new_value = transform_value(value, path)
if isinstance(new_value, Iterator):
items: list = list(new_value)
else:
items = [new_value]
except expected_exceptions:
continue
new_key = key[: -len(postfix_trigger)]
new_path = list(path[:-1])
if new_key:
new_path += [new_key]
for item in items:
if isinstance(item, Fragment):
kwargs = {}
if item.source:
kwargs["source"] = leaf.source + f":{item.source}"
yield leaf.clone(value=item.value, path=new_path, **kwargs)
else:
yield leaf.clone(value=item, path=new_path)
def replace_from_env_vars(
fragment: Fragment, postfix_trigger: str = "_from_env"
) -> Iterator[Fragment]:
"""Read and replace settings values from environment variables.
Args:
fragment: Fragment to process
postfix_trigger: Optionally configurable string to trigger a
replacement with an environment variable. If a key is found which
ends with this string, the value is assumed to be the name of an
            environment variable and the settings value will be set to the
contents of that variable.
Yields:
Additional fragments to patch the original fragment.
"""
class ExpectedTransformError(Exception):
pass
def transform_value(value, path):
if not isinstance(value, str):
raise ValueError(
f"{postfix_trigger} replacement expects a string a a variable."
)
if "$" in value:
env_var_value = os.path.expandvars(value)
else:
try:
env_var_value = os.environ[value]
except KeyError as e:
logger.info(
"Error while trying to load environment variable: %s from %s. (%s) Skipping...",
value,
".".join(str(p) for p in path),
e,
)
raise ExpectedTransformError()
return parse_as_json_if_possible(env_var_value)
yield from replace_from_pattern(
fragment, postfix_trigger, transform_value, (ExpectedTransformError,)
)
def replace_from_file_vars(
fragment: Fragment, postfix_trigger: str = "_from_file"
) -> Iterator[Fragment]:
"""Read and replace settings values from content local files.
Args:
fragment: Fragment to process
postfix_trigger: Optionally configurable string to trigger a local
file value. If a key is found which ends with this string, the
value is assumed to be a file path and the settings value will
be set to the content of the file.
Yields:
Additional fragments to patch the original fragment.
"""
def transform_value(value: Any, path: FragmentPath) -> Any:
if isinstance(value, list):
# if we get a list, process each item one after another.
for item in value:
yield from transform_value(item, path)
return
if not isinstance(value, str):
raise ValueError("file path must be string")
try:
if glob.has_magic(value):
yield from iter_load(value)
return
try:
yield load_from_filepath(value)
return
except NoCompatibleLoaderFoundError:
# just load as plain text file and interpret as string
with open(value) as f:
yield f.read().strip()
return
except FileNotFoundError as e:
logger.info(
"Error while trying to load variable from file: %s. (%s) Skipping...",
value,
".".join(str(p) for p in path),
e,
)
yield from replace_from_pattern(fragment, postfix_trigger, transform_value)
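# Illustrative sketch (hypothetical file path): a key ending in "_from_file"
# is replaced by the parsed or raw content of the referenced file; glob
# patterns expand to one fragment per matching file.
#
#   fragment = Fragment(value={"credentials_from_file": "/etc/app/creds.yaml"},
#                       path=[])
#   patches = list(replace_from_file_vars(fragment))
#   # "credentials_from_file" is dropped and "credentials" holds the file data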
def replace_from_content_vars(fragment: Fragment) -> Iterator[Fragment]:
"""Read and replace settings values from content local files.
Args:
fragment: Fragment to process
Yields:
Additional fragments to patch the original fragment.
"""
file_loader_map = {
ext.strip("."): loader
for loader in FileLoader.registered_loaders
for ext in loader.valid_file_extensions
}
for format_name, loader in file_loader_map.items():
postfix_trigger = f"_from_{format_name}_content"
def transform_value(value, path: FragmentPath):
try:
return loader.from_content(value)
except Exception:
path_str = ".".join(str(p) for p in path)
logger.info(
"Error while trying to load %s content at %s.",
format_name,
path_str,
)
raise
yield from replace_from_pattern(
fragment, postfix_trigger, transform_value, (Exception,)
)
| mit |
greedo/flask-oauth2-devices | myservice.py | 2 | 10080 | import OpenSSL
import hmac
import hashlib
from binascii import hexlify
from datetime import datetime, timedelta
from flask import Flask, abort, render_template
from flask import make_response, request, jsonify, redirect, session
from flask_sqlalchemy import SQLAlchemy
from flask.ext.wtf import Form
from wtforms import StringField, SelectField
from wtforms.validators import DataRequired
from devices import OAuth2DevicesProvider, OAuth2Exception
from forms import ActivateForm
app = Flask(__name__)
app.config.update(
WTF_CSRF_ENABLED=True,
SECRET_KEY='our-big-bad-key',
FIXTURES_DIRS=['tests/fixtures']
)
app.config.update({
'SQLALCHEMY_DATABASE_URI': 'sqlite:///db.sqlite'
})
db = SQLAlchemy(app)
oauth = OAuth2DevicesProvider(app)
AUTH_EXPIRATION_TIME = 3600
OUR_KEY = 'our-big-bad-key'
@app.errorhandler(OAuth2Exception)
def handle_invalid_usage(error):
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
@app.route('/oauth/device', methods=['POST'])
@oauth.code_handler("https://api.example.com/oauth/device/authorize",
"https://example.com/activate", 600, 600)
def code():
return None
@app.route('/oauth/device/authorize', methods=['POST'])
@oauth.authorize_handler()
def authorize():
return None
@app.route('/activate', methods=['GET', 'POST'])
def activate_view():
form = ActivateForm()
if form.validate_on_submit():
if request.method == "POST":
user_code = load_auth_code(request.values.get('user_code'))
if user_code is None or user_code.expires < datetime.utcnow():
return render_template('app_auth_error.html')
return redirect("/oauth/authorization/accept?user_code=\
" + str(user_code.code))
resp = make_response(render_template('user_code_activate.html', form=form))
resp.headers.extend({'X-Frame-Options': 'DENY'})
return resp
@app.route('/oauth/authorization/accept', methods=['GET', 'POST'])
def authorization_accept_view():
user_code = load_auth_code(request.values.get('user_code'))
all_scopes = ['private']
# public is our default scope in this case
if request.values.get('scopes') is None:
scopes = ['public']
else:
scopes = request.values.get('scopes').split()
non_scopes = [scope for scope in all_scopes if scope not in scopes]
resp = make_response(render_template('access_token_authorize.html',
app_id=user_code.client_id,
client_id=user_code.client_id,
user_code=user_code.code,
scopes=scopes,
non_scopes=non_scopes))
resp.headers.extend({'X-Frame-Options': 'DENY'})
return resp
@app.route('/confirmed', methods=['POST'])
def confirmed_view():
# just an extra check in case we didn't block GET in the decorator
if request.method != "POST":
resp = make_response("non-POST on access token", 405)
resp.headers.extend({'Allow': 'POST'})
return resp
client_id = request.values.get('client_id')
if client_id is None:
return make_response("missing client_id", 500)
# we can load our app by client_id here
# and throw a 500 if we have a problem
user_code = load_auth_code(request.values.get('user_code'))
if user_code is None:
return make_response("auth code must be sent", 400)
user_code.is_active = 1
db.session.commit()
resp = make_response(render_template('app_auth_confirm.html',
client_id=user_code.client_id))
resp.headers.extend({'X-Frame-Options': 'DENY'})
return resp
@app.route('/oauth/protect', methods=['GET', 'POST'])
@oauth.require_oauth()
def protect_handler():
return None
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(40), unique=True)
class Client(db.Model):
client_id = db.Column(db.String(40), primary_key=True)
client_secret = db.Column(db.String(55), nullable=False)
user_id = db.Column(db.ForeignKey('user.id'))
user = db.relationship('User')
_redirect_uris = db.Column(db.Text)
_default_scopes = db.Column(db.Text)
@property
def client_type(self):
return 'public'
@property
def redirect_uris(self):
if self._redirect_uris:
return self._redirect_uris.split()
return []
@property
def default_redirect_uri(self):
return self.redirect_uris[0]
@property
def default_scopes(self):
if self._default_scopes:
return self._default_scopes.split()
return []
class Token(db.Model):
id = db.Column(db.Integer, primary_key=True)
client_id = db.Column(
db.String(40), db.ForeignKey('client.client_id'),
nullable=False,
)
client = db.relationship('Client')
user_id = db.Column(
db.Integer, db.ForeignKey('user.id')
)
user = db.relationship('User')
# currently only bearer is supported
token_type = db.Column(db.String(40))
access_token = db.Column(db.String(255))
refresh_token = db.Column(db.String(255))
expires = db.Column(db.DateTime)
created = db.Column(db.DateTime)
_scopes = db.Column(db.Text)
@property
def scopes(self):
if self._scopes:
return self._scopes.split()
return []
def create_access_token(self, client_id, user_id, scope, token_type):
expires_in = AUTH_EXPIRATION_TIME
expires = datetime.utcnow() + timedelta(seconds=expires_in)
created = datetime.utcnow()
tok = Token(
client_id=client_id,
user_id=user_id,
access_token=None,
refresh_token=None,
token_type=token_type,
_scopes=("public private" if scope is None else ' '.join(scope)),
expires=expires,
created=created,
)
if tok.access_token is None:
tok.access_token = tok._generate_token()
db.session.add(tok)
db.session.commit()
return tok
    def refresh(self, token):
        # carry over this token's type and scopes with a fresh expiry window
        expires = datetime.utcnow() + timedelta(seconds=AUTH_EXPIRATION_TIME)
        created = datetime.utcnow()
        tok = Token(
            client_id=self.client_id,
            user_id=self.user_id,
            access_token=self.access_token,
            refresh_token=None,
            token_type=self.token_type,
            _scopes=self._scopes,
            expires=expires,
            created=created,
)
if tok.refresh_token is None:
tok.refresh_token = tok._generate_refresh_token()
db.session.add(tok)
db.session.commit()
return tok
def _generate_token(self):
return hashlib.sha1("app:" + str(self.client_id) + \
":user:" + str(self.user_id) + \
str(hexlify(OpenSSL.rand.bytes(10)))).hexdigest()
def _generate_refresh_token(self):
return hashlib.sha1("app:" + str(self.client_id) + \
":user:" + str(self.user_id) + \
":access_token:" + str(self.id)).hexdigest()
    def contains_scope(self, scope):
        return scope in self.scopes
class Code(db.Model):
id = db.Column(db.Integer, primary_key=True)
client_id = db.Column(
db.String(40), db.ForeignKey('client.client_id'),
nullable=False,
)
client = db.relationship('Client')
user_id = db.Column(
db.Integer, db.ForeignKey('user.id')
)
user = db.relationship('User')
code = db.Column(db.String(40), unique=True)
_scopes = db.Column(db.Text)
expires = db.Column(db.DateTime)
created = db.Column(db.DateTime)
is_active = db.Column(db.Integer)
@property
def scopes(self):
if self._scopes:
return self._scopes.split()
return []
def generate_new_code(self, client_id):
return hashlib.sha1("secret:" + client_id + ":req:" + \
str(hexlify(OpenSSL.rand.bytes(10)))).hexdigest()
    def get_device_code(self):
        return hmac.new(OUR_KEY, "secret:" + str(self.id),
                        hashlib.sha1).hexdigest()  # NOQA
def exchange_for_access_token(self, app):
return Token().create_access_token(app.client_id,
app.user_id,
app.scopes,
"grant_auth_code")
def current_user():
if 'id' in session:
uid = session['id']
return User.query.get(uid)
if 'id' in request.args:
return User.query.get(request.args.get('id'))
return None
@oauth.clientgetter
def load_client(client_id):
return Client.query.filter_by(client_id=client_id).first()
@oauth.authcodesetter
def save_auth_code(code, client_id, user_id, *args, **kwargs):
codes = Code.query.filter_by(
client_id=client_id,
user_id=user_id
)
# make sure that every client has only one code connected to a user
for c in codes:
db.session.delete(c)
expires_in = (AUTH_EXPIRATION_TIME if code is None else \
code.pop('expires_in'))
expires = datetime.utcnow() + timedelta(seconds=expires_in)
created = datetime.utcnow()
cod = Code(
client_id=client_id,
user_id=user_id,
code=(None if code is None else code['code']),
_scopes=('public private' if code is None else code['scope']),
expires=expires,
created=created,
is_active=0
)
if cod.code is None:
cod.code = cod.generate_new_code(cod.client_id)[:8]
db.session.add(cod)
db.session.commit()
return cod
@oauth.authcodegetter
def load_auth_code(code):
return Code.query.filter_by(code=code).first()
if __name__ == "__main__":
db.create_all()
app.run(debug=True)
| mit |
gem/oq-hazardlib | openquake/hazardlib/tests/gsim/fukushima_tanaka_1990_test.py | 1 | 1631 | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2013-2017 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from openquake.hazardlib.gsim.fukushima_tanaka_1990 import (
FukushimaTanaka1990,
FukushimaTanakaSite1990
)
from openquake.hazardlib.tests.gsim.utils import BaseGSIMTestCase
class FukushimaTanaka1990TestCase(BaseGSIMTestCase):
GSIM_CLASS = FukushimaTanaka1990
def test_mean(self):
self.check('FT1990/FT1990_MEAN.csv',
max_discrep_percentage=0.1)
def test_std_total(self):
self.check('FT1990/FT1990_STDTOTAL.csv',
max_discrep_percentage=0.1)
class FukushimaTanaka1990SiteTestCase(BaseGSIMTestCase):
GSIM_CLASS = FukushimaTanakaSite1990
def test_mean(self):
self.check('FT1990/FT1990Site_MEAN.csv',
max_discrep_percentage=0.1)
def test_std_total(self):
self.check('FT1990/FT1990Site_STDTOTAL.csv',
max_discrep_percentage=0.1)
| agpl-3.0 |
geekboxzone/lollipop_external_chromium_org_third_party_WebKit | Tools/Scripts/webkitpy/tool/steps/options.py | 44 | 2269 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from optparse import make_option
class Options(object):
confirm = make_option("--no-confirm", action="store_false", dest="confirm", default=True, help="Skip confirmation steps.")
git_commit = make_option("-g", "--git-commit", action="store", dest="git_commit", help="Operate on a local commit. If a range, the commits are squashed into one. <ref>.... includes the working copy changes. UPSTREAM can be used for the upstream/tracking branch.")
parent_command = make_option("--parent-command", action="store", dest="parent_command", default=None, help="(Internal) The command that spawned this instance.")
quiet = make_option("--quiet", action="store_true", dest="quiet", default=False, help="Produce less console output.")
| bsd-3-clause |
YouthAndra/apprtc | src/third_party/httplib2/socks.py | 811 | 18459 | """SocksiPy - Python SOCKS module.
Version 1.00
Copyright 2006 Dan-Haim. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of Dan Haim nor the names of his contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE.
This module provides a standard socket-like interface for Python
for tunneling connections through SOCKS proxies.
"""
"""
Minor modifications made by Christopher Gilbert (http://motomastyle.com/)
for use in PyLoris (http://pyloris.sourceforge.net/)
Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/)
mainly to merge bug fixes found in Sourceforge
"""
import base64
import socket
import struct
import sys
if getattr(socket, 'socket', None) is None:
raise ImportError('socket.socket missing, proxy support unusable')
PROXY_TYPE_SOCKS4 = 1
PROXY_TYPE_SOCKS5 = 2
PROXY_TYPE_HTTP = 3
PROXY_TYPE_HTTP_NO_TUNNEL = 4
_defaultproxy = None
_orgsocket = socket.socket
class ProxyError(Exception): pass
class GeneralProxyError(ProxyError): pass
class Socks5AuthError(ProxyError): pass
class Socks5Error(ProxyError): pass
class Socks4Error(ProxyError): pass
class HTTPError(ProxyError): pass
_generalerrors = ("success",
"invalid data",
"not connected",
"not available",
"bad proxy type",
"bad input")
_socks5errors = ("succeeded",
"general SOCKS server failure",
"connection not allowed by ruleset",
"Network unreachable",
"Host unreachable",
"Connection refused",
"TTL expired",
"Command not supported",
"Address type not supported",
"Unknown error")
_socks5autherrors = ("succeeded",
"authentication is required",
"all offered authentication methods were rejected",
"unknown username or invalid password",
"unknown error")
_socks4errors = ("request granted",
"request rejected or failed",
"request rejected because SOCKS server cannot connect to identd on the client",
"request rejected because the client program and identd report different user-ids",
"unknown error")
def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
"""setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
Sets a default proxy which all further socksocket objects will use,
unless explicitly changed.
"""
global _defaultproxy
_defaultproxy = (proxytype, addr, port, rdns, username, password)
def wrapmodule(module):
"""wrapmodule(module)
Attempts to replace a module's socket library with a SOCKS socket. Must set
a default proxy using setdefaultproxy(...) first.
This will only work on modules that import socket directly into the namespace;
most of the Python Standard Library falls into this category.
"""
if _defaultproxy != None:
module.socket.socket = socksocket
else:
raise GeneralProxyError((4, "no proxy specified"))
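# Illustrative sketch (assumes a SOCKS5 proxy listening on 127.0.0.1:1080):
# route an existing module's plain sockets through the proxy.
#
#   import urllib2
#   setdefaultproxy(PROXY_TYPE_SOCKS5, "127.0.0.1", 1080)
#   wrapmodule(urllib2)
#   urllib2.urlopen("http://www.example.com/").read()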
class socksocket(socket.socket):
"""socksocket([family[, type[, proto]]]) -> socket object
Open a SOCKS enabled socket. The parameters are the same as
those of the standard socket init. In order for SOCKS to work,
you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
"""
def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
_orgsocket.__init__(self, family, type, proto, _sock)
if _defaultproxy != None:
self.__proxy = _defaultproxy
else:
self.__proxy = (None, None, None, None, None, None)
self.__proxysockname = None
self.__proxypeername = None
self.__httptunnel = True
def __recvall(self, count):
"""__recvall(count) -> data
Receive EXACTLY the number of bytes requested from the socket.
Blocks until the required number of bytes have been received.
"""
data = self.recv(count)
while len(data) < count:
d = self.recv(count-len(data))
if not d: raise GeneralProxyError((0, "connection closed unexpectedly"))
data = data + d
return data
def sendall(self, content, *args):
""" override socket.socket.sendall method to rewrite the header
for non-tunneling proxies if needed
"""
if not self.__httptunnel:
content = self.__rewriteproxy(content)
return super(socksocket, self).sendall(content, *args)
def __rewriteproxy(self, header):
""" rewrite HTTP request headers to support non-tunneling proxies
(i.e. those which do not support the CONNECT method).
This only works for HTTP (not HTTPS) since HTTPS requires tunneling.
"""
host, endpt = None, None
hdrs = header.split("\r\n")
for hdr in hdrs:
if hdr.lower().startswith("host:"):
host = hdr
elif hdr.lower().startswith("get") or hdr.lower().startswith("post"):
endpt = hdr
if host and endpt:
hdrs.remove(host)
hdrs.remove(endpt)
host = host.split(" ")[1]
endpt = endpt.split(" ")
if (self.__proxy[4] != None and self.__proxy[5] != None):
hdrs.insert(0, self.__getauthheader())
hdrs.insert(0, "Host: %s" % host)
hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2]))
return "\r\n".join(hdrs)
def __getauthheader(self):
auth = self.__proxy[4] + ":" + self.__proxy[5]
return "Proxy-Authorization: Basic " + base64.b64encode(auth)
def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
"""setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
Sets the proxy to be used.
proxytype - The type of the proxy to be used. Three types
are supported: PROXY_TYPE_SOCKS4 (including socks4a),
PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
addr - The address of the server (IP or DNS).
port - The port of the server. Defaults to 1080 for SOCKS
servers and 8080 for HTTP proxy servers.
        rdns - Should DNS queries be performed on the remote side
(rather than the local side). The default is True.
Note: This has no effect with SOCKS4 servers.
username - Username to authenticate with to the server.
The default is no authentication.
password - Password to authenticate with to the server.
Only relevant when username is also provided.
"""
self.__proxy = (proxytype, addr, port, rdns, username, password)
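    # Illustrative sketch (hypothetical proxy host): configure a single
    # socket instead of wrapping a whole module.
    #
    #   s = socksocket()
    #   s.setproxy(PROXY_TYPE_SOCKS4, "proxy.example.com", 1080, rdns=True)
    #   s.connect(("www.example.com", 80))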
def __negotiatesocks5(self, destaddr, destport):
"""__negotiatesocks5(self,destaddr,destport)
Negotiates a connection through a SOCKS5 server.
"""
# First we'll send the authentication packages we support.
if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):
# The username/password details were supplied to the
# setproxy method so we support the USERNAME/PASSWORD
# authentication (in addition to the standard none).
self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02))
else:
# No username/password were entered, therefore we
# only support connections with no authentication.
self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00))
# We'll receive the server's response to determine which
# method was selected
chosenauth = self.__recvall(2)
if chosenauth[0:1] != chr(0x05).encode():
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
# Check the chosen authentication method
if chosenauth[1:2] == chr(0x00).encode():
# No authentication is required
pass
elif chosenauth[1:2] == chr(0x02).encode():
# Okay, we need to perform a basic username/password
# authentication.
self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
authstat = self.__recvall(2)
if authstat[0:1] != chr(0x01).encode():
# Bad response
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
if authstat[1:2] != chr(0x00).encode():
# Authentication failed
self.close()
raise Socks5AuthError((3, _socks5autherrors[3]))
# Authentication succeeded
else:
# Reaching here is always bad
self.close()
if chosenauth[1] == chr(0xFF).encode():
raise Socks5AuthError((2, _socks5autherrors[2]))
else:
raise GeneralProxyError((1, _generalerrors[1]))
# Now we can request the actual connection
req = struct.pack('BBB', 0x05, 0x01, 0x00)
# If the given destination address is an IP address, we'll
# use the IPv4 address request even if remote resolving was specified.
try:
ipaddr = socket.inet_aton(destaddr)
req = req + chr(0x01).encode() + ipaddr
except socket.error:
# Well it's not an IP number, so it's probably a DNS name.
if self.__proxy[3]:
# Resolve remotely
ipaddr = None
req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr
else:
# Resolve locally
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
req = req + chr(0x01).encode() + ipaddr
req = req + struct.pack(">H", destport)
self.sendall(req)
# Get the response
resp = self.__recvall(4)
if resp[0:1] != chr(0x05).encode():
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
elif resp[1:2] != chr(0x00).encode():
# Connection failed
self.close()
if ord(resp[1:2])<=8:
raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])]))
else:
raise Socks5Error((9, _socks5errors[9]))
# Get the bound address/port
elif resp[3:4] == chr(0x01).encode():
boundaddr = self.__recvall(4)
elif resp[3:4] == chr(0x03).encode():
resp = resp + self.recv(1)
boundaddr = self.__recvall(ord(resp[4:5]))
else:
self.close()
raise GeneralProxyError((1,_generalerrors[1]))
boundport = struct.unpack(">H", self.__recvall(2))[0]
self.__proxysockname = (boundaddr, boundport)
if ipaddr != None:
self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
else:
self.__proxypeername = (destaddr, destport)
def getproxysockname(self):
"""getsockname() -> address info
Returns the bound IP address and port number at the proxy.
"""
return self.__proxysockname
def getproxypeername(self):
"""getproxypeername() -> address info
Returns the IP and port number of the proxy.
"""
return _orgsocket.getpeername(self)
def getpeername(self):
"""getpeername() -> address info
Returns the IP address and port number of the destination
machine (note: getproxypeername returns the proxy)
"""
return self.__proxypeername
def __negotiatesocks4(self,destaddr,destport):
"""__negotiatesocks4(self,destaddr,destport)
Negotiates a connection through a SOCKS4 server.
"""
# Check if the destination address provided is an IP address
rmtrslv = False
try:
ipaddr = socket.inet_aton(destaddr)
except socket.error:
# It's a DNS name. Check where it should be resolved.
if self.__proxy[3]:
ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01)
rmtrslv = True
else:
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
# Construct the request packet
req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr
# The username parameter is considered userid for SOCKS4
if self.__proxy[4] != None:
req = req + self.__proxy[4]
req = req + chr(0x00).encode()
# DNS name if remote resolving is required
# NOTE: This is actually an extension to the SOCKS4 protocol
# called SOCKS4A and may not be supported in all cases.
if rmtrslv:
req = req + destaddr + chr(0x00).encode()
self.sendall(req)
# Get the response from the server
resp = self.__recvall(8)
if resp[0:1] != chr(0x00).encode():
# Bad data
self.close()
raise GeneralProxyError((1,_generalerrors[1]))
if resp[1:2] != chr(0x5A).encode():
# Server returned an error
self.close()
if ord(resp[1:2]) in (91, 92, 93):
self.close()
raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90]))
else:
raise Socks4Error((94, _socks4errors[4]))
# Get the bound address/port
self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0])
if rmtrslv != None:
self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
else:
self.__proxypeername = (destaddr, destport)
def __negotiatehttp(self, destaddr, destport):
"""__negotiatehttp(self,destaddr,destport)
Negotiates a connection through an HTTP server.
"""
# If we need to resolve locally, we do this now
if not self.__proxy[3]:
addr = socket.gethostbyname(destaddr)
else:
addr = destaddr
headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
headers += ["Host: ", destaddr, "\r\n"]
if (self.__proxy[4] != None and self.__proxy[5] != None):
headers += [self.__getauthheader(), "\r\n"]
headers.append("\r\n")
self.sendall("".join(headers).encode())
# We read the response until we get the string "\r\n\r\n"
resp = self.recv(1)
while resp.find("\r\n\r\n".encode()) == -1:
resp = resp + self.recv(1)
# We just need the first line to check if the connection
# was successful
statusline = resp.splitlines()[0].split(" ".encode(), 2)
if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()):
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
try:
statuscode = int(statusline[1])
except ValueError:
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
if statuscode != 200:
self.close()
raise HTTPError((statuscode, statusline[2]))
self.__proxysockname = ("0.0.0.0", 0)
self.__proxypeername = (addr, destport)
def connect(self, destpair):
"""connect(self, despair)
Connects to the specified destination through a proxy.
destpar - A tuple of the IP/DNS address and the port number.
(identical to socket's connect).
To select the proxy server use setproxy().
"""
# Do a minimal input check first
if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], basestring)) or (type(destpair[1]) != int):
raise GeneralProxyError((5, _generalerrors[5]))
if self.__proxy[0] == PROXY_TYPE_SOCKS5:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 1080
_orgsocket.connect(self, (self.__proxy[1], portnum))
self.__negotiatesocks5(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 1080
_orgsocket.connect(self,(self.__proxy[1], portnum))
self.__negotiatesocks4(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_HTTP:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 8080
_orgsocket.connect(self,(self.__proxy[1], portnum))
self.__negotiatehttp(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 8080
_orgsocket.connect(self,(self.__proxy[1],portnum))
if destpair[1] == 443:
self.__negotiatehttp(destpair[0],destpair[1])
else:
self.__httptunnel = False
elif self.__proxy[0] == None:
_orgsocket.connect(self, (destpair[0], destpair[1]))
else:
raise GeneralProxyError((4, _generalerrors[4]))
| bsd-3-clause |
sghai/robottelo | robottelo/api/assertions.py | 4 | 2440 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from robottelo.test import AssertApiNotRaisesContextManager
def assert_api_not_raises(expected_exception, callable_obj=None,
expected_value=None, value_handler=None, *args,
**kwargs):
"""Fail if an exception of class expected_exception is raised by
callableObj when invoked with specified positional and keyword
arguments. If a different type of exception is raised, it will not be
caught, and the test case will be deemed to have suffered an error,
exactly as for an unexpected exception.
    If called with callable_obj omitted or None, will return a context
object used like this::
with assert_api_not_raises(SomeException):
do_something()
The context manager keeps a reference to the exception as the
'exception' attribute. This allows you to inspect the exception after
the assertion::
with assert_api_not_raises(SomeException) as cm:
do_something()
the_exception = cm.exception
assert the_exception.error_code == 1
    In addition, the optional 'expected_value' and 'value_handler' args may
    be passed. This allows specifying the exact HTTP status code, returned by
    ``requests.HTTPError``, which should be validated. In that case only the
    expected exception with the expected response code will be caught.
"""
context = AssertApiNotRaisesContextManager(
expected_exception,
expected_value=expected_value,
value_handler=value_handler,
)
if callable_obj is None:
return context
with context:
callable_obj(*args, **kwargs)
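# Illustrative sketch (hypothetical Nailgun-style entity; HTTPError would come
# from requests, and the exact expected_value / value_handler contract lives
# in robottelo.test):
#
#   with assert_api_not_raises(HTTPError, expected_value=404) as context:
#       entity.read()
#   # the block fails the test only if HTTPError with the expected value is
#   # raised; other exceptions propagate as usual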
def assert_api_not_raises_regex(expected_exception, expected_regex,
callable_obj=None, expected_value=None,
value_handler=None, *args, **kwargs):
"""Fail if an exception of class expected_exception is raised and the
message in the exception matches a regex.
"""
if expected_regex is not None:
expected_regex = re.compile(expected_regex)
context = AssertApiNotRaisesContextManager(
expected_exception,
expected_regex=expected_regex,
expected_value=expected_value,
value_handler=value_handler,
)
if callable_obj is None:
return context
with context:
callable_obj(*args, **kwargs)
| gpl-3.0 |
pombredanne/MOG | nova/api/openstack/compute/images.py | 15 | 7373 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob.exc
from nova.api.openstack import common
from nova.api.openstack.compute.views import images as views_images
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import exception
import nova.image.glance
from nova.openstack.common.gettextutils import _
import nova.utils
SUPPORTED_FILTERS = {
'name': 'name',
'status': 'status',
'changes-since': 'changes-since',
'server': 'property-instance_uuid',
'type': 'property-image_type',
'minRam': 'min_ram',
'minDisk': 'min_disk',
}
def make_image(elem, detailed=False):
elem.set('name')
elem.set('id')
if detailed:
elem.set('updated')
elem.set('created')
elem.set('status')
elem.set('progress')
elem.set('minRam')
elem.set('minDisk')
server = xmlutil.SubTemplateElement(elem, 'server', selector='server')
server.set('id')
xmlutil.make_links(server, 'links')
elem.append(common.MetadataTemplate())
xmlutil.make_links(elem, 'links')
image_nsmap = {None: xmlutil.XMLNS_V11, 'atom': xmlutil.XMLNS_ATOM}
class ImageTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('image', selector='image')
make_image(root, detailed=True)
return xmlutil.MasterTemplate(root, 1, nsmap=image_nsmap)
class MinimalImagesTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('images')
elem = xmlutil.SubTemplateElement(root, 'image', selector='images')
make_image(elem)
xmlutil.make_links(root, 'images_links')
return xmlutil.MasterTemplate(root, 1, nsmap=image_nsmap)
class ImagesTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('images')
elem = xmlutil.SubTemplateElement(root, 'image', selector='images')
make_image(elem, detailed=True)
return xmlutil.MasterTemplate(root, 1, nsmap=image_nsmap)
class Controller(wsgi.Controller):
"""Base controller for retrieving/displaying images."""
_view_builder_class = views_images.ViewBuilder
def __init__(self, image_service=None, **kwargs):
"""Initialize new `ImageController`.
:param image_service: `nova.image.glance:GlanceImageService`
"""
super(Controller, self).__init__(**kwargs)
self._image_service = (image_service or
nova.image.glance.get_default_image_service())
def _get_filters(self, req):
"""
Return a dictionary of query param filters from the request
:param req: the Request object coming from the wsgi layer
:retval a dict of key/value filters
"""
filters = {}
for param in req.params:
if param in SUPPORTED_FILTERS or param.startswith('property-'):
# map filter name or carry through if property-*
filter_name = SUPPORTED_FILTERS.get(param, param)
filters[filter_name] = req.params.get(param)
# ensure server filter is the instance uuid
filter_name = 'property-instance_uuid'
try:
filters[filter_name] = filters[filter_name].rsplit('/', 1)[1]
except (AttributeError, IndexError, KeyError):
pass
filter_name = 'status'
if filter_name in filters:
# The Image API expects us to use lowercase strings for status
filters[filter_name] = filters[filter_name].lower()
return filters
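    # Illustrative sketch (hypothetical request): a query string such as
    #   GET /images?server=http://.../servers/<uuid>&status=ACTIVE
    # is translated into Glance-style filters roughly like
    #   {'property-instance_uuid': '<uuid>', 'status': 'active'}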
@wsgi.serializers(xml=ImageTemplate)
def show(self, req, id):
"""Return detailed information about a specific image.
:param req: `wsgi.Request` object
:param id: Image identifier
"""
context = req.environ['nova.context']
try:
image = self._image_service.show(context, id)
except (exception.NotFound, exception.InvalidImageRef):
explanation = _("Image not found.")
raise webob.exc.HTTPNotFound(explanation=explanation)
req.cache_db_items('images', [image], 'id')
return self._view_builder.show(req, image)
def delete(self, req, id):
"""Delete an image, if allowed.
:param req: `wsgi.Request` object
:param id: Image identifier (integer)
"""
context = req.environ['nova.context']
try:
self._image_service.delete(context, id)
except exception.ImageNotFound:
explanation = _("Image not found.")
raise webob.exc.HTTPNotFound(explanation=explanation)
except exception.ImageNotAuthorized:
# The image service raises this exception on delete if glanceclient
# raises HTTPForbidden.
explanation = _("You are not allowed to delete the image.")
raise webob.exc.HTTPForbidden(explanation=explanation)
return webob.exc.HTTPNoContent()
@wsgi.serializers(xml=MinimalImagesTemplate)
def index(self, req):
"""Return an index listing of images available to the request.
:param req: `wsgi.Request` object
"""
context = req.environ['nova.context']
filters = self._get_filters(req)
params = req.GET.copy()
page_params = common.get_pagination_params(req)
for key, val in page_params.iteritems():
params[key] = val
try:
images = self._image_service.detail(context, filters=filters,
**page_params)
except exception.Invalid as e:
raise webob.exc.HTTPBadRequest(explanation=e.format_message())
return self._view_builder.index(req, images)
@wsgi.serializers(xml=ImagesTemplate)
def detail(self, req):
"""Return a detailed index listing of images available to the request.
:param req: `wsgi.Request` object.
"""
context = req.environ['nova.context']
filters = self._get_filters(req)
params = req.GET.copy()
page_params = common.get_pagination_params(req)
for key, val in page_params.iteritems():
params[key] = val
try:
images = self._image_service.detail(context, filters=filters,
**page_params)
except exception.Invalid as e:
raise webob.exc.HTTPBadRequest(explanation=e.format_message())
req.cache_db_items('images', images, 'id')
return self._view_builder.detail(req, images)
def create(self, *args, **kwargs):
raise webob.exc.HTTPMethodNotAllowed()
def create_resource():
return wsgi.Resource(Controller())
| apache-2.0 |
dannyperry571/theapprentice | script.module.unidecode/lib/unidecode/x070.py | 252 | 4693 | data = (
'You ', # 0x00
'Yang ', # 0x01
'Lu ', # 0x02
'Si ', # 0x03
'Jie ', # 0x04
'Ying ', # 0x05
'Du ', # 0x06
'Wang ', # 0x07
'Hui ', # 0x08
'Xie ', # 0x09
'Pan ', # 0x0a
'Shen ', # 0x0b
'Biao ', # 0x0c
'Chan ', # 0x0d
'Mo ', # 0x0e
'Liu ', # 0x0f
'Jian ', # 0x10
'Pu ', # 0x11
'Se ', # 0x12
'Cheng ', # 0x13
'Gu ', # 0x14
'Bin ', # 0x15
'Huo ', # 0x16
'Xian ', # 0x17
'Lu ', # 0x18
'Qin ', # 0x19
'Han ', # 0x1a
'Ying ', # 0x1b
'Yong ', # 0x1c
'Li ', # 0x1d
'Jing ', # 0x1e
'Xiao ', # 0x1f
'Ying ', # 0x20
'Sui ', # 0x21
'Wei ', # 0x22
'Xie ', # 0x23
'Huai ', # 0x24
'Hao ', # 0x25
'Zhu ', # 0x26
'Long ', # 0x27
'Lai ', # 0x28
'Dui ', # 0x29
'Fan ', # 0x2a
'Hu ', # 0x2b
'Lai ', # 0x2c
'[?] ', # 0x2d
'[?] ', # 0x2e
'Ying ', # 0x2f
'Mi ', # 0x30
'Ji ', # 0x31
'Lian ', # 0x32
'Jian ', # 0x33
'Ying ', # 0x34
'Fen ', # 0x35
'Lin ', # 0x36
'Yi ', # 0x37
'Jian ', # 0x38
'Yue ', # 0x39
'Chan ', # 0x3a
'Dai ', # 0x3b
'Rang ', # 0x3c
'Jian ', # 0x3d
'Lan ', # 0x3e
'Fan ', # 0x3f
'Shuang ', # 0x40
'Yuan ', # 0x41
'Zhuo ', # 0x42
'Feng ', # 0x43
'She ', # 0x44
'Lei ', # 0x45
'Lan ', # 0x46
'Cong ', # 0x47
'Qu ', # 0x48
'Yong ', # 0x49
'Qian ', # 0x4a
'Fa ', # 0x4b
'Guan ', # 0x4c
'Que ', # 0x4d
'Yan ', # 0x4e
'Hao ', # 0x4f
'Hyeng ', # 0x50
'Sa ', # 0x51
'Zan ', # 0x52
'Luan ', # 0x53
'Yan ', # 0x54
'Li ', # 0x55
'Mi ', # 0x56
'Shan ', # 0x57
'Tan ', # 0x58
'Dang ', # 0x59
'Jiao ', # 0x5a
'Chan ', # 0x5b
'[?] ', # 0x5c
'Hao ', # 0x5d
'Ba ', # 0x5e
'Zhu ', # 0x5f
'Lan ', # 0x60
'Lan ', # 0x61
'Nang ', # 0x62
'Wan ', # 0x63
'Luan ', # 0x64
'Xun ', # 0x65
'Xian ', # 0x66
'Yan ', # 0x67
'Gan ', # 0x68
'Yan ', # 0x69
'Yu ', # 0x6a
'Huo ', # 0x6b
'Si ', # 0x6c
'Mie ', # 0x6d
'Guang ', # 0x6e
'Deng ', # 0x6f
'Hui ', # 0x70
'Xiao ', # 0x71
'Xiao ', # 0x72
'Hu ', # 0x73
'Hong ', # 0x74
'Ling ', # 0x75
'Zao ', # 0x76
'Zhuan ', # 0x77
'Jiu ', # 0x78
'Zha ', # 0x79
'Xie ', # 0x7a
'Chi ', # 0x7b
'Zhuo ', # 0x7c
'Zai ', # 0x7d
'Zai ', # 0x7e
'Can ', # 0x7f
'Yang ', # 0x80
'Qi ', # 0x81
'Zhong ', # 0x82
'Fen ', # 0x83
'Niu ', # 0x84
'Jiong ', # 0x85
'Wen ', # 0x86
'Po ', # 0x87
'Yi ', # 0x88
'Lu ', # 0x89
'Chui ', # 0x8a
'Pi ', # 0x8b
'Kai ', # 0x8c
'Pan ', # 0x8d
'Yan ', # 0x8e
'Kai ', # 0x8f
'Pang ', # 0x90
'Mu ', # 0x91
'Chao ', # 0x92
'Liao ', # 0x93
'Gui ', # 0x94
'Kang ', # 0x95
'Tun ', # 0x96
'Guang ', # 0x97
'Xin ', # 0x98
'Zhi ', # 0x99
'Guang ', # 0x9a
'Guang ', # 0x9b
'Wei ', # 0x9c
'Qiang ', # 0x9d
'[?] ', # 0x9e
'Da ', # 0x9f
'Xia ', # 0xa0
'Zheng ', # 0xa1
'Zhu ', # 0xa2
'Ke ', # 0xa3
'Zhao ', # 0xa4
'Fu ', # 0xa5
'Ba ', # 0xa6
'Duo ', # 0xa7
'Duo ', # 0xa8
'Ling ', # 0xa9
'Zhuo ', # 0xaa
'Xuan ', # 0xab
'Ju ', # 0xac
'Tan ', # 0xad
'Pao ', # 0xae
'Jiong ', # 0xaf
'Pao ', # 0xb0
'Tai ', # 0xb1
'Tai ', # 0xb2
'Bing ', # 0xb3
'Yang ', # 0xb4
'Tong ', # 0xb5
'Han ', # 0xb6
'Zhu ', # 0xb7
'Zha ', # 0xb8
'Dian ', # 0xb9
'Wei ', # 0xba
'Shi ', # 0xbb
'Lian ', # 0xbc
'Chi ', # 0xbd
'Huang ', # 0xbe
'[?] ', # 0xbf
'Hu ', # 0xc0
'Shuo ', # 0xc1
'Lan ', # 0xc2
'Jing ', # 0xc3
'Jiao ', # 0xc4
'Xu ', # 0xc5
'Xing ', # 0xc6
'Quan ', # 0xc7
'Lie ', # 0xc8
'Huan ', # 0xc9
'Yang ', # 0xca
'Xiao ', # 0xcb
'Xiu ', # 0xcc
'Xian ', # 0xcd
'Yin ', # 0xce
'Wu ', # 0xcf
'Zhou ', # 0xd0
'Yao ', # 0xd1
'Shi ', # 0xd2
'Wei ', # 0xd3
'Tong ', # 0xd4
'Xue ', # 0xd5
'Zai ', # 0xd6
'Kai ', # 0xd7
'Hong ', # 0xd8
'Luo ', # 0xd9
'Xia ', # 0xda
'Zhu ', # 0xdb
'Xuan ', # 0xdc
'Zheng ', # 0xdd
'Po ', # 0xde
'Yan ', # 0xdf
'Hui ', # 0xe0
'Guang ', # 0xe1
'Zhe ', # 0xe2
'Hui ', # 0xe3
'Kao ', # 0xe4
'[?] ', # 0xe5
'Fan ', # 0xe6
'Shao ', # 0xe7
'Ye ', # 0xe8
'Hui ', # 0xe9
'[?] ', # 0xea
'Tang ', # 0xeb
'Jin ', # 0xec
'Re ', # 0xed
'[?] ', # 0xee
'Xi ', # 0xef
'Fu ', # 0xf0
'Jiong ', # 0xf1
'Che ', # 0xf2
'Pu ', # 0xf3
'Jing ', # 0xf4
'Zhuo ', # 0xf5
'Ting ', # 0xf6
'Wan ', # 0xf7
'Hai ', # 0xf8
'Peng ', # 0xf9
'Lang ', # 0xfa
'Shan ', # 0xfb
'Hu ', # 0xfc
'Feng ', # 0xfd
'Chi ', # 0xfe
'Rong ', # 0xff
)
| gpl-2.0 |
WilliamWickerson/MLProject | fastNetwork.py | 1 | 7332 | from random import uniform
import numpy
def sigmoidFunction(x):
try:
ret = 1 / (1 + numpy.exp(-x))
except OverflowError:
ret = 0
return ret
def softMax(array):
exp = [numpy.exp(x) for x in array]
return numpy.array([x / sum(exp) for x in exp])
global sigmoid
sigmoid = numpy.vectorize(sigmoidFunction)
class FastNetwork:
def __init__(self, rowSizes, learningRate=1, softmax=True):
self.weights = list()
self.learningRate = learningRate
self.softmax = softmax
#Rates initialized according to:
#http://datascience.stackexchange.com/questions/10926/how-to-deep-neural-network-weight-initialization
for i in range(len(rowSizes)):
if i == 0:
r = numpy.sqrt(6 / (1 + rowSizes[i + 1]))
elif i == len(rowSizes) - 1:
r = numpy.sqrt(6 / (rowSizes[i - 1] + 1))
else:
r = numpy.sqrt(6 / (rowSizes[i - 1] + rowSizes[i + 1]))
if i < len(rowSizes) - 1:
tempArray = numpy.array([uniform(-r, r) for x in range(rowSizes[i]*(rowSizes[i+1] + 1))])
tempArray = numpy.reshape(tempArray, (rowSizes[i], rowSizes[i+1] + 1))
else:
tempArray = numpy.array([uniform(-r, r) for x in range(rowSizes[i]*2)])
tempArray = numpy.reshape(tempArray, (rowSizes[i], 2))
self.weights.append(tempArray)
def startFromFileOld(self, filename):
#Open files compatible with network.py's Network
with open(filename) as weightFile:
rows = weightFile.readlines()
rowData = [numpy.fromstring(row.strip()[2:-1], sep=' ') for row in rows]
assert len(rowData) == sum(matrix.shape[0] for matrix in self.weights)
for i in range(len(self.weights)):
size = self.weights[i].shape[0]
length = self.weights[i].shape[1]
assert all([len(row) == length for row in rowData[:size]])
newArray = numpy.stack(rowData[0:size])
self.weights[i] = newArray
rowData = rowData[size:]
def startFromFile(self, filename):
#Open files and overwrite weights
with open(filename) as weightFile:
weightStrings = weightFile.readlines()
assert len(weightStrings) == len(self.weights)
for i in range(len(weightStrings)):
weightString = weightStrings[i].strip()
weightArray = numpy.fromstring(weightString[2:-1], sep=' ')
assert weightArray.size == self.weights[i].size
weightArray = numpy.reshape(weightArray, self.weights[i].shape)
self.weights[i] = weightArray
def writeToFile(self, filename):
#Write all of the weights data to file
with open(filename, 'w') as weightFile:
numpy.set_printoptions(threshold = numpy.inf, linewidth = numpy.inf)
for matrix in self.weights:
printable = numpy.reshape(matrix, (numpy.product(matrix.shape)))
weightFile.write(numpy.array_str(printable) + "\n")
numpy.set_printoptions(threshold = 10, linewidth = 75)
def networkOutputs(self, inputs):
#Calculate the outputs for each row in the neural network
assert len(inputs) == self.weights[len(self.weights) - 1].shape[0]
outputs = list()
for i in reversed(range(len(self.weights))):
#Input Layer
if i == len(self.weights) - 1:
inputArray = numpy.array(inputs)
inputArray = numpy.reshape(inputArray, (len(inputs), 1))
onesArray = numpy.ones((len(inputs), 1))
inputArray = numpy.concatenate((inputArray, onesArray), axis=1)
#Row-wise dot product of inputs and weights
output = numpy.einsum('ij, ij->i', self.weights[i], inputArray)
output = sigmoid(output)
outputs.append(output)
#Otherwise
else:
inputArray = numpy.array(numpy.concatenate((outputs[0], [1])))
#Matrix multiplication of weights and input vector
output = self.weights[i] @ inputArray
if i == 0 and self.softmax:
output = softMax(output)
else:
output = sigmoid(output)
outputs.insert(0, output)
return outputs
def classify(self, inputs):
#Return the most probable output
outputs = self.networkOutputs(inputs)
maxValue = max(outputs[0])
maxIndex = outputs[0].tolist().index(maxValue)
return maxIndex, maxValue
def backPropagate(self, inputs, targets):
outputs = self.networkOutputs(inputs)
targets = numpy.array(targets)
inputs = numpy.array(inputs)
deltas = list()
changes = list()
#Back propagate the error
for i in range(len(self.weights)):
#Output layer error and change
if i == 0:
if self.softmax:
error = targets - outputs[i]
delta = error
else:
error = targets - outputs[i]
#delta = error * outputs * (1 - outputs)
delta = error * outputs[i] * (numpy.ones((self.weights[i].shape[0])) - outputs[i])
deltas.append(delta)
change = numpy.outer((self.learningRate * deltas[i]), numpy.array(numpy.concatenate((outputs[i+1], [1]))))
changes.append(change)
#Input layer error and change
elif i == len(self.weights) - 1:
error = numpy.dot(deltas[i - 1], self.weights[i - 1][:,:-1])
delta = error * outputs[i] * (numpy.ones((self.weights[i].shape[0])) - outputs[i])
deltas.append(delta)
doubleDelta = numpy.stack((delta, delta))
inputArray = numpy.stack((inputs, numpy.ones(self.weights[i].shape[0])))
change = numpy.transpose(doubleDelta * inputArray)
changes.append(change)
#Hidden layer error and change
else:
error = numpy.dot(deltas[i - 1], self.weights[i - 1][:,:-1])
delta = error * outputs[i] * (numpy.ones((self.weights[i].shape[0])) - outputs[i])
deltas.append(delta)
change = numpy.outer((self.learningRate * deltas[i]), numpy.array(numpy.concatenate((outputs[i+1], [1]))))
changes.append(change)
#Update the weights matrices
for i in range(len(self.weights)):
self.weights[i] += changes[i]
"""
numpy.set_printoptions(threshold = numpy.inf, linewidth = numpy.inf)
network = FastNetwork([40,100,161])
#network.writeToFile("test.txt")
#network.startFromFile("test.txt")
network.startFromFileOld("old.txt")
outputs = network.networkOutputs([1]*161)
print(outputs[0])
network.backPropagate([1]*161, [0] + [1] + [0]*38)
import time
start = time.time()
for i in range(1000):
network.backPropagate([1]*161, [0] + [1] + [0]*38)
print(time.time() - start)
"""
"""
print(network.classify([1]*161))
print(network.networkOutputs([1]*161)[0])
"""
| mit |
nazo/ansible | lib/ansible/modules/cloud/amazon/ec2_vpc_route_table_facts.py | 62 | 4169 | #!/usr/bin/python
#
# This is a free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This Ansible library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ec2_vpc_route_table_facts
short_description: Gather facts about ec2 VPC route tables in AWS
description:
- Gather facts about ec2 VPC route tables in AWS
version_added: "2.0"
author: "Rob White (@wimnat)"
options:
filters:
description:
- A dict of filters to apply. Each dict item consists of a filter key and a filter value.
See U(http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeRouteTables.html) for possible filters.
required: false
default: null
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Gather facts about all VPC route tables
- ec2_vpc_route_table_facts:
# Gather facts about a particular VPC route table using route table ID
- ec2_vpc_route_table_facts:
filters:
route-table-id: rtb-00112233
# Gather facts about any VPC route table with a tag key Name and value Example
- ec2_vpc_route_table_facts:
filters:
"tag:Name": Example
# Gather facts about any VPC route table within VPC with ID vpc-abcdef00
- ec2_vpc_route_table_facts:
filters:
vpc-id: vpc-abcdef00
'''
try:
import boto.vpc
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import AnsibleAWSError, connect_to_aws, ec2_argument_spec, get_aws_connection_info
def get_route_table_info(route_table):
# Add any routes to array
routes = []
associations = []
for route in route_table.routes:
routes.append(route.__dict__)
for association in route_table.associations:
associations.append(association.__dict__)
route_table_info = {'id': route_table.id,
'routes': routes,
'associations': associations,
'tags': route_table.tags,
'vpc_id': route_table.vpc_id
}
return route_table_info
def list_ec2_vpc_route_tables(connection, module):
filters = module.params.get("filters")
route_table_dict_array = []
try:
all_route_tables = connection.get_all_route_tables(filters=filters)
except BotoServerError as e:
module.fail_json(msg=e.message)
for route_table in all_route_tables:
route_table_dict_array.append(get_route_table_info(route_table))
module.exit_json(route_tables=route_table_dict_array)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
filters=dict(default=None, type='dict')
)
)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if region:
try:
connection = connect_to_aws(boto.vpc, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
module.fail_json(msg=str(e))
else:
module.fail_json(msg="region must be specified")
list_ec2_vpc_route_tables(connection, module)
if __name__ == '__main__':
main()
| gpl-3.0 |
brianlsharp/MissionPlanner | Lib/threading.py | 44 | 33709 | """Thread module emulating a subset of Java's threading model."""
import sys as _sys
try:
import thread
except ImportError:
del _sys.modules[__name__]
raise
import warnings
from time import time as _time, sleep as _sleep
from traceback import format_exc as _format_exc
from collections import deque
# Note regarding PEP 8 compliant aliases
# This threading model was originally inspired by Java, and inherited
# the convention of camelCase function and method names from that
# language. While those names are not in any imminent danger of being
# deprecated, starting with Python 2.6, the module now provides a
# PEP 8 compliant alias for any such method name.
# Using the new PEP 8 compliant names also facilitates substitution
# with the multiprocessing module, which doesn't provide the old
# Java inspired names.
# Rename some stuff so "from threading import *" is safe
__all__ = ['activeCount', 'active_count', 'Condition', 'currentThread',
'current_thread', 'enumerate', 'Event',
'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread',
'Timer', 'setprofile', 'settrace', 'local', 'stack_size']
_start_new_thread = thread.start_new_thread
_allocate_lock = thread.allocate_lock
_get_ident = thread.get_ident
ThreadError = thread.error
del thread
# sys.exc_clear is used to work around the fact that except blocks
# don't fully clear the exception until 3.0.
warnings.filterwarnings('ignore', category=DeprecationWarning,
module='threading', message='sys.exc_clear')
# Debug support (adapted from ihooks.py).
# All the major classes here derive from _Verbose. We force that to
# be a new-style class so that all the major classes here are new-style.
# This helps debugging (type(instance) is more revealing for instances
# of new-style classes).
_VERBOSE = False
if __debug__:
class _Verbose(object):
def __init__(self, verbose=None):
if verbose is None:
verbose = _VERBOSE
self.__verbose = verbose
def _note(self, format, *args):
if self.__verbose:
format = format % args
# Issue #4188: calling current_thread() can incur an infinite
# recursion if it has to create a DummyThread on the fly.
ident = _get_ident()
try:
name = _active[ident].name
except KeyError:
name = "<OS thread %d>" % ident
format = "%s: %s\n" % (name, format)
_sys.stderr.write(format)
else:
# Disable this when using "python -O"
class _Verbose(object):
def __init__(self, verbose=None):
pass
def _note(self, *args):
pass
# Support for profile and trace hooks
_profile_hook = None
_trace_hook = None
def setprofile(func):
global _profile_hook
_profile_hook = func
def settrace(func):
global _trace_hook
_trace_hook = func
# Synchronization classes
Lock = _allocate_lock
def RLock(*args, **kwargs):
return _RLock(*args, **kwargs)
class _RLock(_Verbose):
def __init__(self, verbose=None):
_Verbose.__init__(self, verbose)
self.__block = _allocate_lock()
self.__owner = None
self.__count = 0
def __repr__(self):
owner = self.__owner
try:
owner = _active[owner].name
except KeyError:
pass
return "<%s owner=%r count=%d>" % (
self.__class__.__name__, owner, self.__count)
def acquire(self, blocking=1):
me = _get_ident()
if self.__owner == me:
self.__count = self.__count + 1
if __debug__:
self._note("%s.acquire(%s): recursive success", self, blocking)
return 1
rc = self.__block.acquire(blocking)
if rc:
self.__owner = me
self.__count = 1
if __debug__:
self._note("%s.acquire(%s): initial success", self, blocking)
else:
if __debug__:
self._note("%s.acquire(%s): failure", self, blocking)
return rc
__enter__ = acquire
def release(self):
if self.__owner != _get_ident():
raise RuntimeError("cannot release un-acquired lock")
self.__count = count = self.__count - 1
if not count:
self.__owner = None
self.__block.release()
if __debug__:
self._note("%s.release(): final release", self)
else:
if __debug__:
self._note("%s.release(): non-final release", self)
def __exit__(self, t, v, tb):
self.release()
# Internal methods used by condition variables
def _acquire_restore(self, count_owner):
count, owner = count_owner
self.__block.acquire()
self.__count = count
self.__owner = owner
if __debug__:
self._note("%s._acquire_restore()", self)
def _release_save(self):
if __debug__:
self._note("%s._release_save()", self)
count = self.__count
self.__count = 0
owner = self.__owner
self.__owner = None
self.__block.release()
return (count, owner)
def _is_owned(self):
return self.__owner == _get_ident()
def Condition(*args, **kwargs):
return _Condition(*args, **kwargs)
class _Condition(_Verbose):
def __init__(self, lock=None, verbose=None):
_Verbose.__init__(self, verbose)
if lock is None:
lock = RLock()
self.__lock = lock
# Export the lock's acquire() and release() methods
self.acquire = lock.acquire
self.release = lock.release
# If the lock defines _release_save() and/or _acquire_restore(),
# these override the default implementations (which just call
# release() and acquire() on the lock). Ditto for _is_owned().
try:
self._release_save = lock._release_save
except AttributeError:
pass
try:
self._acquire_restore = lock._acquire_restore
except AttributeError:
pass
try:
self._is_owned = lock._is_owned
except AttributeError:
pass
self.__waiters = []
def __enter__(self):
return self.__lock.__enter__()
def __exit__(self, *args):
return self.__lock.__exit__(*args)
def __repr__(self):
return "<Condition(%s, %d)>" % (self.__lock, len(self.__waiters))
def _release_save(self):
self.__lock.release() # No state to save
def _acquire_restore(self, x):
self.__lock.acquire() # Ignore saved state
def _is_owned(self):
# Return True if lock is owned by current_thread.
# This method is called only if __lock doesn't have _is_owned().
if self.__lock.acquire(0):
self.__lock.release()
return False
else:
return True
def wait(self, timeout=None):
if not self._is_owned():
raise RuntimeError("cannot wait on un-acquired lock")
waiter = _allocate_lock()
waiter.acquire()
self.__waiters.append(waiter)
saved_state = self._release_save()
try: # restore state no matter what (e.g., KeyboardInterrupt)
if timeout is None:
waiter.acquire()
if __debug__:
self._note("%s.wait(): got it", self)
else:
# Balancing act: We can't afford a pure busy loop, so we
# have to sleep; but if we sleep the whole timeout time,
# we'll be unresponsive. The scheme here sleeps very
# little at first, longer as time goes on, but never longer
# than 20 times per second (or the timeout time remaining).
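                # (With the initial delay below, the successive sleeps are
                # roughly 1 ms, 2 ms, 4 ms, ..., capped at 50 ms and at the
                # remaining time, i.e. at most about 20 wakeups per second.)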
endtime = _time() + timeout
delay = 0.0005 # 500 us -> initial delay of 1 ms
while True:
gotit = waiter.acquire(0)
if gotit:
break
remaining = endtime - _time()
if remaining <= 0:
break
delay = min(delay * 2, remaining, .05)
_sleep(delay)
if not gotit:
if __debug__:
self._note("%s.wait(%s): timed out", self, timeout)
try:
self.__waiters.remove(waiter)
except ValueError:
pass
else:
if __debug__:
self._note("%s.wait(%s): got it", self, timeout)
finally:
self._acquire_restore(saved_state)
def notify(self, n=1):
if not self._is_owned():
raise RuntimeError("cannot notify on un-acquired lock")
__waiters = self.__waiters
waiters = __waiters[:n]
if not waiters:
if __debug__:
self._note("%s.notify(): no waiters", self)
return
self._note("%s.notify(): notifying %d waiter%s", self, n,
n!=1 and "s" or "")
for waiter in waiters:
waiter.release()
try:
__waiters.remove(waiter)
except ValueError:
pass
def notifyAll(self):
self.notify(len(self.__waiters))
notify_all = notifyAll
def Semaphore(*args, **kwargs):
return _Semaphore(*args, **kwargs)
class _Semaphore(_Verbose):
# After Tim Peters' semaphore class, but not quite the same (no maximum)
def __init__(self, value=1, verbose=None):
if value < 0:
raise ValueError("semaphore initial value must be >= 0")
_Verbose.__init__(self, verbose)
self.__cond = Condition(Lock())
self.__value = value
def acquire(self, blocking=1):
rc = False
self.__cond.acquire()
while self.__value == 0:
if not blocking:
break
if __debug__:
self._note("%s.acquire(%s): blocked waiting, value=%s",
self, blocking, self.__value)
self.__cond.wait()
else:
self.__value = self.__value - 1
if __debug__:
self._note("%s.acquire: success, value=%s",
self, self.__value)
rc = True
self.__cond.release()
return rc
__enter__ = acquire
def release(self):
self.__cond.acquire()
self.__value = self.__value + 1
if __debug__:
self._note("%s.release: success, value=%s",
self, self.__value)
self.__cond.notify()
self.__cond.release()
def __exit__(self, t, v, tb):
self.release()
def BoundedSemaphore(*args, **kwargs):
return _BoundedSemaphore(*args, **kwargs)
class _BoundedSemaphore(_Semaphore):
"""Semaphore that checks that # releases is <= # acquires"""
def __init__(self, value=1, verbose=None):
_Semaphore.__init__(self, value, verbose)
self._initial_value = value
def release(self):
if self._Semaphore__value >= self._initial_value:
raise ValueError, "Semaphore released too many times"
return _Semaphore.release(self)
def Event(*args, **kwargs):
return _Event(*args, **kwargs)
class _Event(_Verbose):
# After Tim Peters' event class (without is_posted())
def __init__(self, verbose=None):
_Verbose.__init__(self, verbose)
self.__cond = Condition(Lock())
self.__flag = False
def _reset_internal_locks(self):
# private! called by Thread._reset_internal_locks by _after_fork()
self.__cond.__init__()
def isSet(self):
return self.__flag
is_set = isSet
def set(self):
self.__cond.acquire()
try:
self.__flag = True
self.__cond.notify_all()
finally:
self.__cond.release()
def clear(self):
self.__cond.acquire()
try:
self.__flag = False
finally:
self.__cond.release()
def wait(self, timeout=None):
self.__cond.acquire()
try:
if not self.__flag:
self.__cond.wait(timeout)
return self.__flag
finally:
self.__cond.release()
# Helper to generate new thread names
_counter = 0
def _newname(template="Thread-%d"):
global _counter
_counter = _counter + 1
return template % _counter
# Active thread administration
_active_limbo_lock = _allocate_lock()
_active = {} # maps thread id to Thread object
_limbo = {}
# Main class for threads
class Thread(_Verbose):
__initialized = False
# Need to store a reference to sys.exc_info for printing
# out exceptions when a thread tries to use a global var. during interp.
# shutdown and thus raises an exception about trying to perform some
# operation on/with a NoneType
__exc_info = _sys.exc_info
# Keep sys.exc_clear too to clear the exception just before
# allowing .join() to return.
__exc_clear = _sys.exc_clear
def __init__(self, group=None, target=None, name=None,
args=(), kwargs=None, verbose=None):
assert group is None, "group argument must be None for now"
_Verbose.__init__(self, verbose)
if kwargs is None:
kwargs = {}
self.__target = target
self.__name = str(name or _newname())
self.__args = args
self.__kwargs = kwargs
self.__daemonic = self._set_daemon()
self.__ident = None
self.__started = Event()
self.__stopped = False
self.__block = Condition(Lock())
self.__initialized = True
# sys.stderr is not stored in the class like
# sys.exc_info since it can be changed between instances
self.__stderr = _sys.stderr
def _reset_internal_locks(self):
# private! Called by _after_fork() to reset our internal locks as
# they may be in an invalid state leading to a deadlock or crash.
if hasattr(self, '_Thread__block'): # DummyThread deletes self.__block
self.__block.__init__()
self.__started._reset_internal_locks()
@property
def _block(self):
# used by a unittest
return self.__block
def _set_daemon(self):
# Overridden in _MainThread and _DummyThread
return current_thread().daemon
def __repr__(self):
assert self.__initialized, "Thread.__init__() was not called"
status = "initial"
if self.__started.is_set():
status = "started"
if self.__stopped:
status = "stopped"
if self.__daemonic:
status += " daemon"
if self.__ident is not None:
status += " %s" % self.__ident
return "<%s(%s, %s)>" % (self.__class__.__name__, self.__name, status)
def start(self):
if not self.__initialized:
raise RuntimeError("thread.__init__() not called")
if self.__started.is_set():
raise RuntimeError("threads can only be started once")
if __debug__:
self._note("%s.start(): starting thread", self)
with _active_limbo_lock:
_limbo[self] = self
try:
_start_new_thread(self.__bootstrap, ())
except Exception:
with _active_limbo_lock:
del _limbo[self]
raise
self.__started.wait()
def run(self):
try:
if self.__target:
self.__target(*self.__args, **self.__kwargs)
finally:
# Avoid a refcycle if the thread is running a function with
# an argument that has a member that points to the thread.
del self.__target, self.__args, self.__kwargs
def __bootstrap(self):
# Wrapper around the real bootstrap code that ignores
# exceptions during interpreter cleanup. Those typically
# happen when a daemon thread wakes up at an unfortunate
# moment, finds the world around it destroyed, and raises some
# random exception *** while trying to report the exception in
# __bootstrap_inner() below ***. Those random exceptions
# don't help anybody, and they confuse users, so we suppress
# them. We suppress them only when it appears that the world
# indeed has already been destroyed, so that exceptions in
# __bootstrap_inner() during normal business hours are properly
# reported. Also, we only suppress them for daemonic threads;
# if a non-daemonic encounters this, something else is wrong.
try:
self.__bootstrap_inner()
except:
if self.__daemonic and _sys is None:
return
raise
def _set_ident(self):
self.__ident = _get_ident()
def __bootstrap_inner(self):
try:
self._set_ident()
self.__started.set()
with _active_limbo_lock:
_active[self.__ident] = self
del _limbo[self]
if __debug__:
self._note("%s.__bootstrap(): thread started", self)
if _trace_hook:
self._note("%s.__bootstrap(): registering trace hook", self)
_sys.settrace(_trace_hook)
if _profile_hook:
self._note("%s.__bootstrap(): registering profile hook", self)
_sys.setprofile(_profile_hook)
try:
self.run()
except SystemExit:
if __debug__:
self._note("%s.__bootstrap(): raised SystemExit", self)
except:
if __debug__:
self._note("%s.__bootstrap(): unhandled exception", self)
# If sys.stderr is no more (most likely from interpreter
# shutdown) use self.__stderr. Otherwise still use sys (as in
# _sys) in case sys.stderr was redefined since the creation of
# self.
if _sys:
_sys.stderr.write("Exception in thread %s:\n%s\n" %
(self.name, _format_exc()))
else:
# Do the best job possible w/o a huge amt. of code to
# approximate a traceback (code ideas from
# Lib/traceback.py)
exc_type, exc_value, exc_tb = self.__exc_info()
try:
print>>self.__stderr, (
"Exception in thread " + self.name +
" (most likely raised during interpreter shutdown):")
print>>self.__stderr, (
"Traceback (most recent call last):")
while exc_tb:
print>>self.__stderr, (
' File "%s", line %s, in %s' %
(exc_tb.tb_frame.f_code.co_filename,
exc_tb.tb_lineno,
exc_tb.tb_frame.f_code.co_name))
exc_tb = exc_tb.tb_next
print>>self.__stderr, ("%s: %s" % (exc_type, exc_value))
# Make sure that exc_tb gets deleted since it is a memory
# hog; deleting everything else is just for thoroughness
finally:
del exc_type, exc_value, exc_tb
else:
if __debug__:
self._note("%s.__bootstrap(): normal return", self)
finally:
# Prevent a race in
# test_threading.test_no_refcycle_through_target when
# the exception keeps the target alive past when we
# assert that it's dead.
self.__exc_clear()
finally:
with _active_limbo_lock:
self.__stop()
try:
# We don't call self.__delete() because it also
# grabs _active_limbo_lock.
del _active[_get_ident()]
except:
pass
def __stop(self):
self.__block.acquire()
self.__stopped = True
self.__block.notify_all()
self.__block.release()
def __delete(self):
"Remove current thread from the dict of currently running threads."
# Notes about running with dummy_thread:
#
# Must take care to not raise an exception if dummy_thread is being
# used (and thus this module is being used as an instance of
# dummy_threading). dummy_thread.get_ident() always returns -1 since
# there is only one thread if dummy_thread is being used. Thus
# len(_active) is always <= 1 here, and any Thread instance created
# overwrites the (if any) thread currently registered in _active.
#
# An instance of _MainThread is always created by 'threading'. This
# gets overwritten the instant an instance of Thread is created; both
# threads return -1 from dummy_thread.get_ident() and thus have the
# same key in the dict. So when the _MainThread instance created by
# 'threading' tries to clean itself up when atexit calls this method
# it gets a KeyError if another Thread instance was created.
#
# This all means that KeyError from trying to delete something from
# _active if dummy_threading is being used is a red herring. But
# since it isn't if dummy_threading is *not* being used then don't
# hide the exception.
try:
with _active_limbo_lock:
del _active[_get_ident()]
# There must not be any python code between the previous line
# and after the lock is released. Otherwise a tracing function
# could try to acquire the lock again in the same thread, (in
# current_thread()), and would block.
except KeyError:
if 'dummy_threading' not in _sys.modules:
raise
def join(self, timeout=None):
if not self.__initialized:
raise RuntimeError("Thread.__init__() not called")
if not self.__started.is_set():
raise RuntimeError("cannot join thread before it is started")
if self is current_thread():
raise RuntimeError("cannot join current thread")
if __debug__:
if not self.__stopped:
self._note("%s.join(): waiting until thread stops", self)
self.__block.acquire()
try:
if timeout is None:
while not self.__stopped:
self.__block.wait()
if __debug__:
self._note("%s.join(): thread stopped", self)
else:
deadline = _time() + timeout
while not self.__stopped:
delay = deadline - _time()
if delay <= 0:
if __debug__:
self._note("%s.join(): timed out", self)
break
self.__block.wait(delay)
else:
if __debug__:
self._note("%s.join(): thread stopped", self)
finally:
self.__block.release()
@property
def name(self):
assert self.__initialized, "Thread.__init__() not called"
return self.__name
@name.setter
def name(self, name):
assert self.__initialized, "Thread.__init__() not called"
self.__name = str(name)
@property
def ident(self):
assert self.__initialized, "Thread.__init__() not called"
return self.__ident
def isAlive(self):
assert self.__initialized, "Thread.__init__() not called"
return self.__started.is_set() and not self.__stopped
is_alive = isAlive
@property
def daemon(self):
assert self.__initialized, "Thread.__init__() not called"
return self.__daemonic
@daemon.setter
def daemon(self, daemonic):
if not self.__initialized:
raise RuntimeError("Thread.__init__() not called")
if self.__started.is_set():
raise RuntimeError("cannot set daemon status of active thread");
self.__daemonic = daemonic
def isDaemon(self):
return self.daemon
def setDaemon(self, daemonic):
self.daemon = daemonic
def getName(self):
return self.name
def setName(self, name):
self.name = name
# The timer class was contributed by Itamar Shtull-Trauring
def Timer(*args, **kwargs):
return _Timer(*args, **kwargs)
class _Timer(Thread):
"""Call a function after a specified number of seconds:
t = Timer(30.0, f, args=[], kwargs={})
t.start()
t.cancel() # stop the timer's action if it's still waiting
"""
def __init__(self, interval, function, args=[], kwargs={}):
Thread.__init__(self)
self.interval = interval
self.function = function
self.args = args
self.kwargs = kwargs
self.finished = Event()
def cancel(self):
"""Stop the timer if it hasn't finished yet"""
self.finished.set()
def run(self):
self.finished.wait(self.interval)
if not self.finished.is_set():
self.function(*self.args, **self.kwargs)
self.finished.set()
# Special thread class to represent the main thread
# This is garbage collected through an exit handler
class _MainThread(Thread):
def __init__(self):
Thread.__init__(self, name="MainThread")
self._Thread__started.set()
self._set_ident()
with _active_limbo_lock:
_active[_get_ident()] = self
def _set_daemon(self):
return False
def _exitfunc(self):
self._Thread__stop()
t = _pickSomeNonDaemonThread()
if t:
if __debug__:
self._note("%s: waiting for other threads", self)
while t:
t.join()
t = _pickSomeNonDaemonThread()
if __debug__:
self._note("%s: exiting", self)
self._Thread__delete()
def _pickSomeNonDaemonThread():
for t in enumerate():
if not t.daemon and t.is_alive():
return t
return None
# Dummy thread class to represent threads not started here.
# These aren't garbage collected when they die, nor can they be waited for.
# If they invoke anything in threading.py that calls current_thread(), they
# leave an entry in the _active dict forever after.
# Their purpose is to return *something* from current_thread().
# They are marked as daemon threads so we won't wait for them
# when we exit (conforming to previous semantics).
class _DummyThread(Thread):
def __init__(self):
Thread.__init__(self, name=_newname("Dummy-%d"))
# Thread.__block consumes an OS-level locking primitive, which
# can never be used by a _DummyThread. Since a _DummyThread
# instance is immortal, that's bad, so release this resource.
del self._Thread__block
self._Thread__started.set()
self._set_ident()
with _active_limbo_lock:
_active[_get_ident()] = self
def _set_daemon(self):
return True
def join(self, timeout=None):
assert False, "cannot join a dummy thread"
# Global API functions
def currentThread():
try:
return _active[_get_ident()]
except KeyError:
##print "current_thread(): no current thread for", _get_ident()
return _DummyThread()
current_thread = currentThread
def activeCount():
with _active_limbo_lock:
return len(_active) + len(_limbo)
active_count = activeCount
def _enumerate():
# Same as enumerate(), but without the lock. Internal use only.
return _active.values() + _limbo.values()
def enumerate():
with _active_limbo_lock:
return _active.values() + _limbo.values()
from thread import stack_size
# Create the main thread object,
# and make it available for the interpreter
# (Py_Main) as threading._shutdown.
_shutdown = _MainThread()._exitfunc
# get thread-local implementation, either from the thread
# module, or from the python fallback
try:
from thread import _local as local
except ImportError:
from _threading_local import local
def _after_fork():
# This function is called by Python/ceval.c:PyEval_ReInitThreads which
# is called from PyOS_AfterFork. Here we cleanup threading module state
# that should not exist after a fork.
# Reset _active_limbo_lock, in case we forked while the lock was held
# by another (non-forked) thread. http://bugs.python.org/issue874900
global _active_limbo_lock
_active_limbo_lock = _allocate_lock()
# fork() only copied the current thread; clear references to others.
new_active = {}
current = current_thread()
with _active_limbo_lock:
for thread in _active.itervalues():
if thread is current:
# There is only one active thread. We reset the ident to
# its new value since it can have changed.
ident = _get_ident()
thread._Thread__ident = ident
# Any condition variables hanging off of the active thread may
# be in an invalid state, so we reinitialize them.
if hasattr(thread, '_reset_internal_locks'):
thread._reset_internal_locks()
new_active[ident] = thread
else:
# All the others are already stopped.
# We don't call _Thread__stop() because it tries to acquire
# thread._Thread__block which could also have been held while
# we forked.
thread._Thread__stopped = True
_limbo.clear()
_active.clear()
_active.update(new_active)
assert len(_active) == 1
# Self-test code
def _test():
class BoundedQueue(_Verbose):
def __init__(self, limit):
_Verbose.__init__(self)
self.mon = RLock()
self.rc = Condition(self.mon)
self.wc = Condition(self.mon)
self.limit = limit
self.queue = deque()
def put(self, item):
self.mon.acquire()
while len(self.queue) >= self.limit:
self._note("put(%s): queue full", item)
self.wc.wait()
self.queue.append(item)
self._note("put(%s): appended, length now %d",
item, len(self.queue))
self.rc.notify()
self.mon.release()
def get(self):
self.mon.acquire()
while not self.queue:
self._note("get(): queue empty")
self.rc.wait()
item = self.queue.popleft()
self._note("get(): got %s, %d left", item, len(self.queue))
self.wc.notify()
self.mon.release()
return item
class ProducerThread(Thread):
def __init__(self, queue, quota):
Thread.__init__(self, name="Producer")
self.queue = queue
self.quota = quota
def run(self):
from random import random
counter = 0
while counter < self.quota:
counter = counter + 1
self.queue.put("%s.%d" % (self.name, counter))
_sleep(random() * 0.00001)
class ConsumerThread(Thread):
def __init__(self, queue, count):
Thread.__init__(self, name="Consumer")
self.queue = queue
self.count = count
def run(self):
while self.count > 0:
item = self.queue.get()
print item
self.count = self.count - 1
NP = 3
QL = 4
NI = 5
Q = BoundedQueue(QL)
P = []
for i in range(NP):
t = ProducerThread(Q, NI)
t.name = ("Producer-%d" % (i+1))
P.append(t)
C = ConsumerThread(Q, NI*NP)
for t in P:
t.start()
_sleep(0.000001)
C.start()
for t in P:
t.join()
C.join()
if __name__ == '__main__':
_test()
| gpl-3.0 |
wxgeo/geophar | wxgeometrie/sympy/physics/mechanics/tests/test_kane3.py | 33 | 14636 | import warnings
from sympy.core.compatibility import range
from sympy import evalf, symbols, pi, sin, cos, sqrt, acos, Matrix
from sympy.physics.mechanics import (ReferenceFrame, dynamicsymbols, inertia,
KanesMethod, RigidBody, Point, dot, msubs)
from sympy.utilities.exceptions import SymPyDeprecationWarning
from sympy.utilities.pytest import slow, ON_TRAVIS, skip
@slow
def test_bicycle():
if ON_TRAVIS:
skip("Too slow for travis.")
# Code to get equations of motion for a bicycle modeled as in:
# J.P Meijaard, Jim M Papadopoulos, Andy Ruina and A.L Schwab. Linearized
# dynamics equations for the balance and steer of a bicycle: a benchmark
# and review. Proceedings of The Royal Society (2007) 463, 1955-1982
# doi: 10.1098/rspa.2007.1857
# Note that this code has been crudely ported from Autolev, which is the
# reason for some of the unusual naming conventions. It was purposefully as
    # similar as possible in order to aid debugging.
# Declare Coordinates & Speeds
# Simple definitions for qdots - qd = u
# Speeds are: yaw frame ang. rate, roll frame ang. rate, rear wheel frame
# ang. rate (spinning motion), frame ang. rate (pitching motion), steering
# frame ang. rate, and front wheel ang. rate (spinning motion).
# Wheel positions are ignorable coordinates, so they are not introduced.
q1, q2, q4, q5 = dynamicsymbols('q1 q2 q4 q5')
q1d, q2d, q4d, q5d = dynamicsymbols('q1 q2 q4 q5', 1)
u1, u2, u3, u4, u5, u6 = dynamicsymbols('u1 u2 u3 u4 u5 u6')
u1d, u2d, u3d, u4d, u5d, u6d = dynamicsymbols('u1 u2 u3 u4 u5 u6', 1)
# Declare System's Parameters
WFrad, WRrad, htangle, forkoffset = symbols('WFrad WRrad htangle forkoffset')
forklength, framelength, forkcg1 = symbols('forklength framelength forkcg1')
forkcg3, framecg1, framecg3, Iwr11 = symbols('forkcg3 framecg1 framecg3 Iwr11')
Iwr22, Iwf11, Iwf22, Iframe11 = symbols('Iwr22 Iwf11 Iwf22 Iframe11')
Iframe22, Iframe33, Iframe31, Ifork11 = symbols('Iframe22 Iframe33 Iframe31 Ifork11')
Ifork22, Ifork33, Ifork31, g = symbols('Ifork22 Ifork33 Ifork31 g')
mframe, mfork, mwf, mwr = symbols('mframe mfork mwf mwr')
# Set up reference frames for the system
# N - inertial
# Y - yaw
# R - roll
# WR - rear wheel, rotation angle is ignorable coordinate so not oriented
# Frame - bicycle frame
# TempFrame - statically rotated frame for easier reference inertia definition
# Fork - bicycle fork
# TempFork - statically rotated frame for easier reference inertia definition
    # WF - front wheel, which again possesses an ignorable coordinate
N = ReferenceFrame('N')
Y = N.orientnew('Y', 'Axis', [q1, N.z])
R = Y.orientnew('R', 'Axis', [q2, Y.x])
Frame = R.orientnew('Frame', 'Axis', [q4 + htangle, R.y])
WR = ReferenceFrame('WR')
TempFrame = Frame.orientnew('TempFrame', 'Axis', [-htangle, Frame.y])
Fork = Frame.orientnew('Fork', 'Axis', [q5, Frame.x])
TempFork = Fork.orientnew('TempFork', 'Axis', [-htangle, Fork.y])
WF = ReferenceFrame('WF')
# Kinematics of the Bicycle First block of code is forming the positions of
# the relevant points
# rear wheel contact -> rear wheel mass center -> frame mass center +
# frame/fork connection -> fork mass center + front wheel mass center ->
# front wheel contact point
WR_cont = Point('WR_cont')
WR_mc = WR_cont.locatenew('WR_mc', WRrad * R.z)
Steer = WR_mc.locatenew('Steer', framelength * Frame.z)
Frame_mc = WR_mc.locatenew('Frame_mc', - framecg1 * Frame.x
+ framecg3 * Frame.z)
Fork_mc = Steer.locatenew('Fork_mc', - forkcg1 * Fork.x
+ forkcg3 * Fork.z)
WF_mc = Steer.locatenew('WF_mc', forklength * Fork.x + forkoffset * Fork.z)
WF_cont = WF_mc.locatenew('WF_cont', WFrad * (dot(Fork.y, Y.z) * Fork.y -
Y.z).normalize())
# Set the angular velocity of each frame.
# Angular accelerations end up being calculated automatically by
# differentiating the angular velocities when first needed.
# u1 is yaw rate
# u2 is roll rate
# u3 is rear wheel rate
# u4 is frame pitch rate
# u5 is fork steer rate
# u6 is front wheel rate
Y.set_ang_vel(N, u1 * Y.z)
R.set_ang_vel(Y, u2 * R.x)
WR.set_ang_vel(Frame, u3 * Frame.y)
Frame.set_ang_vel(R, u4 * Frame.y)
Fork.set_ang_vel(Frame, u5 * Fork.x)
WF.set_ang_vel(Fork, u6 * Fork.y)
# Form the velocities of the previously defined points, using the 2 - point
# theorem (written out by hand here). Accelerations again are calculated
# automatically when first needed.
WR_cont.set_vel(N, 0)
WR_mc.v2pt_theory(WR_cont, N, WR)
Steer.v2pt_theory(WR_mc, N, Frame)
Frame_mc.v2pt_theory(WR_mc, N, Frame)
Fork_mc.v2pt_theory(Steer, N, Fork)
WF_mc.v2pt_theory(Steer, N, Fork)
WF_cont.v2pt_theory(WF_mc, N, WF)
# Sets the inertias of each body. Uses the inertia frame to construct the
    # inertia dyadics. Wheel inertias are only defined by principal moments of
# inertia, and are in fact constant in the frame and fork reference frames;
    # it is for this reason that the orientations of the wheels do not need
# to be defined. The frame and fork inertias are defined in the 'Temp'
# frames which are fixed to the appropriate body frames; this is to allow
# easier input of the reference values of the benchmark paper. Note that
# due to slightly different orientations, the products of inertia need to
# have their signs flipped; this is done later when entering the numerical
# value.
Frame_I = (inertia(TempFrame, Iframe11, Iframe22, Iframe33, 0, 0, Iframe31), Frame_mc)
Fork_I = (inertia(TempFork, Ifork11, Ifork22, Ifork33, 0, 0, Ifork31), Fork_mc)
WR_I = (inertia(Frame, Iwr11, Iwr22, Iwr11), WR_mc)
WF_I = (inertia(Fork, Iwf11, Iwf22, Iwf11), WF_mc)
# Declaration of the RigidBody containers. ::
BodyFrame = RigidBody('BodyFrame', Frame_mc, Frame, mframe, Frame_I)
BodyFork = RigidBody('BodyFork', Fork_mc, Fork, mfork, Fork_I)
BodyWR = RigidBody('BodyWR', WR_mc, WR, mwr, WR_I)
BodyWF = RigidBody('BodyWF', WF_mc, WF, mwf, WF_I)
# The kinematic differential equations; they are defined quite simply. Each
# entry in this list is equal to zero.
kd = [q1d - u1, q2d - u2, q4d - u4, q5d - u5]
# The nonholonomic constraints are the velocity of the front wheel contact
# point dotted into the X, Y, and Z directions; the yaw frame is used as it
# is "closer" to the front wheel (1 less DCM connecting them). These
# constraints force the velocity of the front wheel contact point to be 0
# in the inertial frame; the X and Y direction constraints enforce a
# "no-slip" condition, and the Z direction constraint forces the front
# wheel contact point to not move away from the ground frame, essentially
# replicating the holonomic constraint which does not allow the frame pitch
# to change in an invalid fashion.
conlist_speed = [WF_cont.vel(N) & Y.x, WF_cont.vel(N) & Y.y, WF_cont.vel(N) & Y.z]
# The holonomic constraint is that the position from the rear wheel contact
# point to the front wheel contact point when dotted into the
# normal-to-ground plane direction must be zero; effectively that the front
# and rear wheel contact points are always touching the ground plane. This
# is actually not part of the dynamic equations, but instead is necessary
    # for the linearization process.
conlist_coord = [WF_cont.pos_from(WR_cont) & Y.z]
# The force list; each body has the appropriate gravitational force applied
# at its mass center.
FL = [(Frame_mc, -mframe * g * Y.z),
(Fork_mc, -mfork * g * Y.z),
(WF_mc, -mwf * g * Y.z),
(WR_mc, -mwr * g * Y.z)]
BL = [BodyFrame, BodyFork, BodyWR, BodyWF]
# The N frame is the inertial frame, coordinates are supplied in the order
# of independent, dependent coordinates, as are the speeds. The kinematic
# differential equation are also entered here. Here the dependent speeds
# are specified, in the same order they were provided in earlier, along
# with the non-holonomic constraints. The dependent coordinate is also
# provided, with the holonomic constraint. Again, this is only provided
# for the linearization process.
KM = KanesMethod(N, q_ind=[q1, q2, q5],
q_dependent=[q4], configuration_constraints=conlist_coord,
u_ind=[u2, u3, u5],
u_dependent=[u1, u4, u6], velocity_constraints=conlist_speed,
kd_eqs=kd)
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=SymPyDeprecationWarning)
(fr, frstar) = KM.kanes_equations(FL, BL)
# This is the start of entering in the numerical values from the benchmark
# paper to validate the eigen values of the linearized equations from this
# model to the reference eigen values. Look at the aforementioned paper for
# more information. Some of these are intermediate values, used to
# transform values from the paper into the coordinate systems used in this
# model.
PaperRadRear = 0.3
PaperRadFront = 0.35
HTA = evalf.N(pi / 2 - pi / 10)
TrailPaper = 0.08
rake = evalf.N(-(TrailPaper*sin(HTA)-(PaperRadFront*cos(HTA))))
PaperWb = 1.02
PaperFrameCgX = 0.3
PaperFrameCgZ = 0.9
PaperForkCgX = 0.9
PaperForkCgZ = 0.7
FrameLength = evalf.N(PaperWb*sin(HTA)-(rake-(PaperRadFront-PaperRadRear)*cos(HTA)))
FrameCGNorm = evalf.N((PaperFrameCgZ - PaperRadRear-(PaperFrameCgX/sin(HTA))*cos(HTA))*sin(HTA))
FrameCGPar = evalf.N((PaperFrameCgX / sin(HTA) + (PaperFrameCgZ - PaperRadRear - PaperFrameCgX / sin(HTA) * cos(HTA)) * cos(HTA)))
tempa = evalf.N((PaperForkCgZ - PaperRadFront))
tempb = evalf.N((PaperWb-PaperForkCgX))
tempc = evalf.N(sqrt(tempa**2+tempb**2))
PaperForkL = evalf.N((PaperWb*cos(HTA)-(PaperRadFront-PaperRadRear)*sin(HTA)))
ForkCGNorm = evalf.N(rake+(tempc * sin(pi/2-HTA-acos(tempa/tempc))))
ForkCGPar = evalf.N(tempc * cos((pi/2-HTA)-acos(tempa/tempc))-PaperForkL)
# Here is the final assembly of the numerical values. The symbol 'v' is the
# forward speed of the bicycle (a concept which only makes sense in the
# upright, static equilibrium case?). These are in a dictionary which will
# later be substituted in. Again the sign on the *product* of inertia
# values is flipped here, due to different orientations of coordinate
# systems.
v = symbols('v')
val_dict = {WFrad: PaperRadFront,
WRrad: PaperRadRear,
htangle: HTA,
forkoffset: rake,
forklength: PaperForkL,
framelength: FrameLength,
forkcg1: ForkCGPar,
forkcg3: ForkCGNorm,
framecg1: FrameCGNorm,
framecg3: FrameCGPar,
Iwr11: 0.0603,
Iwr22: 0.12,
Iwf11: 0.1405,
Iwf22: 0.28,
Ifork11: 0.05892,
Ifork22: 0.06,
Ifork33: 0.00708,
Ifork31: 0.00756,
Iframe11: 9.2,
Iframe22: 11,
Iframe33: 2.8,
Iframe31: -2.4,
mfork: 4,
mframe: 85,
mwf: 3,
mwr: 2,
g: 9.81,
q1: 0,
q2: 0,
q4: 0,
q5: 0,
u1: 0,
u2: 0,
u3: v / PaperRadRear,
u4: 0,
u5: 0,
u6: v / PaperRadFront}
# Linearizes the forcing vector; the equations are set up as MM udot =
# forcing, where MM is the mass matrix, udot is the vector representing the
# time derivatives of the generalized speeds, and forcing is a vector which
# contains both external forcing terms and internal forcing terms, such as
    # centripetal or Coriolis forces. This actually returns a matrix with as
# many rows as *total* coordinates and speeds, but only as many columns as
# independent coordinates and speeds.
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=SymPyDeprecationWarning)
forcing_lin = KM.linearize()[0]
# As mentioned above, the size of the linearized forcing terms is expanded
# to include both q's and u's, so the mass matrix must have this done as
# well. This will likely be changed to be part of the linearized process,
# for future reference.
MM_full = KM.mass_matrix_full
MM_full_s = msubs(MM_full, val_dict)
forcing_lin_s = msubs(forcing_lin, KM.kindiffdict(), val_dict)
MM_full_s = MM_full_s.evalf()
forcing_lin_s = forcing_lin_s.evalf()
# Finally, we construct an "A" matrix for the form xdot = A x (x being the
# state vector, although in this case, the sizes are a little off). The
# following line extracts only the minimum entries required for eigenvalue
# analysis, which correspond to rows and columns for lean, steer, lean
# rate, and steer rate.
Amat = MM_full_s.inv() * forcing_lin_s
A = Amat.extract([1, 2, 4, 6], [1, 2, 3, 5])
# Precomputed for comparison
Res = Matrix([[ 0, 0, 1.0, 0],
[ 0, 0, 0, 1.0],
[9.48977444677355, -0.891197738059089*v**2 - 0.571523173729245, -0.105522449805691*v, -0.330515398992311*v],
[11.7194768719633, -1.97171508499972*v**2 + 30.9087533932407, 3.67680523332152*v, -3.08486552743311*v]])
# Actual eigenvalue comparison
eps = 1.e-12
for i in range(6):
error = Res.subs(v, i) - A.subs(v, i)
assert all(abs(x) < eps for x in error)
| gpl-2.0 |
sauloal/pycluster | pypy-1.9_64/lib-python/2.7/pipes.py | 82 | 9647 | """Conversion pipeline templates.
The problem:
------------
Suppose you have some data that you want to convert to another format,
such as from GIF image format to PPM image format. Maybe the
conversion involves several steps (e.g. piping it through compress or
uuencode). Some of the conversion steps may require that their input
is a disk file, others may be able to read standard input; similar for
their output. The input to the entire conversion may also be read
from a disk file or from an open file, and similar for its output.
The module lets you construct a pipeline template by sticking one or
more conversion steps together. It will take care of creating and
removing temporary files if they are necessary to hold intermediate
data. You can then use the template to do conversions from many
different sources to many different destinations. The temporary
file names used are different each time the template is used.
The templates are objects so you can create templates for many
different conversion steps and store them in a dictionary, for
instance.
Directions:
-----------
To create a template:
t = Template()
To add a conversion step to a template:
t.append(command, kind)
where kind is a string of two characters: the first is '-' if the
command reads its standard input or 'f' if it requires a file; the
second likewise for the output. The command must be valid /bin/sh
syntax. If input or output files are required, they are passed as
$IN and $OUT; otherwise, it must be possible to use the command in
a pipeline.
To add a conversion step at the beginning:
t.prepend(command, kind)
To convert a file to another file using a template:
sts = t.copy(infile, outfile)
If infile or outfile are the empty string, standard input is read or
standard output is written, respectively. The return value is the
exit status of the conversion pipeline.
To open a file for reading or writing through a conversion pipeline:
fp = t.open(file, mode)
where mode is 'r' to read the file, or 'w' to write it -- just like
for the built-in function open() or for os.popen().
To create a new template object initialized to a given one:
t2 = t.clone()
For an example, see the function test() at the end of the file.
""" # '
import re
import os
import tempfile
import string
__all__ = ["Template"]
# Conversion step kinds
FILEIN_FILEOUT = 'ff' # Must read & write real files
STDIN_FILEOUT = '-f' # Must write a real file
FILEIN_STDOUT = 'f-' # Must read a real file
STDIN_STDOUT = '--' # Normal pipeline element
SOURCE = '.-' # Must be first, writes stdout
SINK = '-.' # Must be last, reads stdin
stepkinds = [FILEIN_FILEOUT, STDIN_FILEOUT, FILEIN_STDOUT, STDIN_STDOUT, \
SOURCE, SINK]
class Template:
"""Class representing a pipeline template."""
def __init__(self):
"""Template() returns a fresh pipeline template."""
self.debugging = 0
self.reset()
def __repr__(self):
"""t.__repr__() implements repr(t)."""
return '<Template instance, steps=%r>' % (self.steps,)
def reset(self):
"""t.reset() restores a pipeline template to its initial state."""
self.steps = []
def clone(self):
"""t.clone() returns a new pipeline template with identical
initial state as the current one."""
t = Template()
t.steps = self.steps[:]
t.debugging = self.debugging
return t
def debug(self, flag):
"""t.debug(flag) turns debugging on or off."""
self.debugging = flag
def append(self, cmd, kind):
"""t.append(cmd, kind) adds a new step at the end."""
if type(cmd) is not type(''):
raise TypeError, \
'Template.append: cmd must be a string'
if kind not in stepkinds:
raise ValueError, \
'Template.append: bad kind %r' % (kind,)
if kind == SOURCE:
raise ValueError, \
'Template.append: SOURCE can only be prepended'
if self.steps and self.steps[-1][1] == SINK:
raise ValueError, \
'Template.append: already ends with SINK'
if kind[0] == 'f' and not re.search(r'\$IN\b', cmd):
raise ValueError, \
'Template.append: missing $IN in cmd'
if kind[1] == 'f' and not re.search(r'\$OUT\b', cmd):
raise ValueError, \
'Template.append: missing $OUT in cmd'
self.steps.append((cmd, kind))
def prepend(self, cmd, kind):
"""t.prepend(cmd, kind) adds a new step at the front."""
if type(cmd) is not type(''):
raise TypeError, \
'Template.prepend: cmd must be a string'
if kind not in stepkinds:
raise ValueError, \
'Template.prepend: bad kind %r' % (kind,)
if kind == SINK:
raise ValueError, \
'Template.prepend: SINK can only be appended'
if self.steps and self.steps[0][1] == SOURCE:
raise ValueError, \
'Template.prepend: already begins with SOURCE'
if kind[0] == 'f' and not re.search(r'\$IN\b', cmd):
raise ValueError, \
'Template.prepend: missing $IN in cmd'
if kind[1] == 'f' and not re.search(r'\$OUT\b', cmd):
raise ValueError, \
'Template.prepend: missing $OUT in cmd'
self.steps.insert(0, (cmd, kind))
def open(self, file, rw):
"""t.open(file, rw) returns a pipe or file object open for
reading or writing; the file is the other end of the pipeline."""
if rw == 'r':
return self.open_r(file)
if rw == 'w':
return self.open_w(file)
raise ValueError, \
'Template.open: rw must be \'r\' or \'w\', not %r' % (rw,)
def open_r(self, file):
"""t.open_r(file) and t.open_w(file) implement
t.open(file, 'r') and t.open(file, 'w') respectively."""
if not self.steps:
return open(file, 'r')
if self.steps[-1][1] == SINK:
raise ValueError, \
                  'Template.open_r: pipeline ends with SINK'
cmd = self.makepipeline(file, '')
return os.popen(cmd, 'r')
def open_w(self, file):
if not self.steps:
return open(file, 'w')
if self.steps[0][1] == SOURCE:
raise ValueError, \
'Template.open_w: pipeline begins with SOURCE'
cmd = self.makepipeline('', file)
return os.popen(cmd, 'w')
def copy(self, infile, outfile):
return os.system(self.makepipeline(infile, outfile))
def makepipeline(self, infile, outfile):
cmd = makepipeline(infile, self.steps, outfile)
if self.debugging:
print cmd
cmd = 'set -x; ' + cmd
return cmd
def makepipeline(infile, steps, outfile):
# Build a list with for each command:
# [input filename or '', command string, kind, output filename or '']
list = []
for cmd, kind in steps:
list.append(['', cmd, kind, ''])
#
# Make sure there is at least one step
#
if not list:
list.append(['', 'cat', '--', ''])
#
# Take care of the input and output ends
#
[cmd, kind] = list[0][1:3]
if kind[0] == 'f' and not infile:
list.insert(0, ['', 'cat', '--', ''])
list[0][0] = infile
#
[cmd, kind] = list[-1][1:3]
if kind[1] == 'f' and not outfile:
list.append(['', 'cat', '--', ''])
list[-1][-1] = outfile
#
# Invent temporary files to connect stages that need files
#
garbage = []
for i in range(1, len(list)):
lkind = list[i-1][2]
rkind = list[i][2]
if lkind[1] == 'f' or rkind[0] == 'f':
(fd, temp) = tempfile.mkstemp()
os.close(fd)
garbage.append(temp)
list[i-1][-1] = list[i][0] = temp
#
for item in list:
[inf, cmd, kind, outf] = item
if kind[1] == 'f':
cmd = 'OUT=' + quote(outf) + '; ' + cmd
if kind[0] == 'f':
cmd = 'IN=' + quote(inf) + '; ' + cmd
if kind[0] == '-' and inf:
cmd = cmd + ' <' + quote(inf)
if kind[1] == '-' and outf:
cmd = cmd + ' >' + quote(outf)
item[1] = cmd
#
cmdlist = list[0][1]
for item in list[1:]:
[cmd, kind] = item[1:3]
if item[0] == '':
if 'f' in kind:
cmd = '{ ' + cmd + '; }'
cmdlist = cmdlist + ' |\n' + cmd
else:
cmdlist = cmdlist + '\n' + cmd
#
if garbage:
rmcmd = 'rm -f'
for file in garbage:
rmcmd = rmcmd + ' ' + quote(file)
trapcmd = 'trap ' + quote(rmcmd + '; exit') + ' 1 2 3 13 14 15'
cmdlist = trapcmd + '\n' + cmdlist + '\n' + rmcmd
#
return cmdlist
# Reliably quote a string as a single argument for /bin/sh
# Safe unquoted
_safechars = frozenset(string.ascii_letters + string.digits + '@%_-+=:,./')
def quote(file):
"""Return a shell-escaped version of the file string."""
for c in file:
if c not in _safechars:
break
else:
if not file:
return "''"
return file
# use single quotes, and put single quotes into double quotes
# the string $'b is then quoted as '$'"'"'b'
return "'" + file.replace("'", "'\"'\"'") + "'"
| mit |
named-data/ndn-atmos | lib/hep_translators/hep_translator/hep2ndn_parser/conf_file_parser.py | 1 | 4345 | #!/usr/bin/env python3
# -*- Mode:python; c-file-style:"gnu"; indent-tabs-mode:nil -*- */
#
# Copyright (c) 2015, Colorado State University.
#
# This file is part of ndn-atmos.
#
# ndn-atmos is free software: you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later version.
#
# ndn-atmos is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
#
# You should have received copies of the GNU General Public License and GNU Lesser
# General Public License along with ndn-atmos, e.g., in COPYING.md file. If not, see
# <http://www.gnu.org/licenses/>.
#
# See AUTHORS.md for complete list of ndn-atmos authors and contributors.
'''This is the config file parser module.
Input = object with command line parameters.
Output = list of components for different config sections'''
import configparser
import sys, traceback
class ParseConf(object):
'''parses the name schema file and returns name mappings for translated output'''
def __init__(self, confName):
self.confName = confName
if __debug__:
print("Config file name: %s" %(self.confName))
self.filenameMap = []
self.ndnNameMap = []
self.seperatorsMap = []
self.userDefinedConfDir = {}
self.translator = []
#initialize the parser
self.parser = configparser.SafeConfigParser()
self.parser.optionxform=str
self.parser.read(self.confName)
self.fullConf = {}
#do the mapping
res = self.getMappings(confName)
if res is False:
print("Error getting values from config file")
            raise RuntimeError("Error getting values from config file")
def _parseConf(self):
#iterate over them and store the name components in fullConf
try:
for sectionName in self.parser.sections():
self.conf = {}
for name, value in self.parser.items(sectionName):
self.conf[name] = value
self.fullConf[sectionName] = self.conf
if __debug__:
print(self.fullConf)
except KeyError:
print("Key %s is not found in config file" %(name))
print(sys.exc_info()[2])
except TypeError:
print("TypeError while parsing config file")
print(sys.exc_info()[2])
return self.fullConf
def _doParsing(self):
#parser now contain a dictionary with the sections in conf
# first elements are section and second ones are variables defined in config file
try:
self.filenameMap = self.fullConf['Name']['filenameMapping'].replace(" ", "").split(',')
self.ndnNameMap = self.fullConf['Name']['ndnMapping'].replace(" ", "").split(',')
# user defined components look like this
#activity:cmip5, subactivity:atmos, organization:csu, ensemble:r3i1p1
## userDefinedConf = self.fullConf['Name']['userDefinedComps'].replace(" ", "").split(',')
## for item in userDefinedConf:
## key, value = item.split(":")
## self.userDefinedConfDir[key] = [value]
self.seperatorsMap = self.fullConf['Name']['seperators'].replace(" ", "").split(',')
#reads which translator to use
self.translator = self.fullConf['Translator']['translator'].replace(" ", "")
except KeyError:
print("Key %s is not found in config file" %(self.confName))
print(sys.exc_info()[2])
except TypeError:
print("TypeError while parsing config file")
print(sys.exc_info()[2])
def getMappings(self, confName):
'''parses the schema file and provides name mappings'''
fullConf = self._parseConf()
#if dict is not empty
if fullConf:
res = self._doParsing()
if len(self.filenameMap) == 0 or len(self.ndnNameMap) == 0 or len(self.translator) == 0:
return False
else:
return True
else:
return False
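# An illustrative (hypothetical) schema file for ParseConf, inferred from the
# keys read in _doParsing() above; the values are placeholders only:
#
#     [Name]
#     filenameMapping = activity, subactivity, organization, ensemble
#     ndnMapping = activity, subactivity, organization, ensemble
#     seperators = _, _, _
#
#     [Translator]
#     translator = hep_translator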
| gpl-3.0 |
mlperf/training_results_v0.7 | Google/benchmarks/transformer/implementations/transformer-research-TF-tpu-v4-16/google/trainer/base_runner.py | 3 | 1048 | # Lint as: python2, python3
"""Base class for all jobs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo import base_runner as lingvo_base_runner
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo import compat as tf
from REDACTED.learning.REDACTED.research.babelfish import REDACTED_utils
#from third_party.tensorflow_models.mlperf.models.rough.transformer_lingvo.google import REDACTED_utils
class BaseRunner(lingvo_base_runner.BaseRunner):
"""Base class for all jobs."""
def _SetStatusMessage(self, message, retrying=False):
"""Update the REDACTED status message for this task."""
REDACTED_utils.SetBorgStatusMessage(
self._FormatStatusMessage(message, retrying))
def _CreateSummaryWriter(self, logdir):
"""Creates and returns a tf summary writer."""
suffix = None
return tf.summary.FileWriter(logdir, filename_suffix=suffix)
| apache-2.0 |
vipullakhani/mi-instrument | mi/platform/rsn/simulator/oms_values.py | 10 | 7128 | #!/usr/bin/env python
"""
@package ion.agents.platform.rsn.simulator.oms_values
@file ion/agents/platform/rsn/simulator/oms_values.py
@author Carlos Rueda
@brief Platform attribute value generators for the RSN OMS simulator.
"""
__author__ = 'Carlos Rueda'
__license__ = 'Apache 2.0'
import time
import ntplib
import math
# time begins a few secs ago from now for purposes of reporting
_START_TIME = ntplib.system_to_ntp_time(time.time() - 30)
# maximum value array size for a single generation call
_MAX_RESULT_SIZE = 1000
# next value for generators created by _create_simple_generator
_next_value = 990000
def _create_simple_generator(gen_period):
"""
Returns a simple generator that reports incremental values every given
time period.
@param gen_period discretize the time axis by this period in secs
@retval A function to be called with parameters (from_time, to_time) where
from_time and to_time are the lower and upper limits (both
inclusive) of desired time window (NTP).
"""
def _gen(from_time, to_time):
global _next_value
if from_time < _START_TIME:
from_time = _START_TIME
# t: initial abscissa coordinate within the time window
l_from_time = long(from_time - 2*gen_period)
t = float((l_from_time / gen_period) * gen_period)
while t < from_time:
t += gen_period
values = []
while t <= to_time:
val = _next_value
_next_value += 1
timestamp = t
values.append((val, timestamp))
t += gen_period
if len(values) == _MAX_RESULT_SIZE:
break
return values
return _gen
def _create_sine_generator(sine_period, gen_period, min_val, max_val):
"""
Returns a sine stream fluctuating between min_val and max_val.
@param sine_period Sine period in secs
@param gen_period discretize the time axis by this period in secs
@param min_val min value
@param max_val max value
@retval A function to be called with parameters (from_time, to_time) where
from_time and to_time are the lower and upper limits (both
inclusive) of desired time window (NTP).
"""
twopi = 2 * math.pi
def _gen(from_time, to_time):
if from_time < _START_TIME:
from_time = _START_TIME
# t: initial abscissa coordinate within the time window
l_from_time = long(from_time - 2*gen_period)
t = float((l_from_time / gen_period) * gen_period)
while t < from_time:
t += gen_period
range2 = (max_val - min_val) / 2
values = []
while t <= to_time:
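            # map the unit sine onto [min_val, max_val]:
            # val = sin(2*pi*t/sine_period) * (max_val - min_val)/2 + (max_val + min_val)/2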
s = math.sin(t / sine_period * twopi)
val = s * range2 + (max_val + min_val) / 2
timestamp = t
values.append((val, timestamp))
t += gen_period
if len(values) == _MAX_RESULT_SIZE:
break
return values
return _gen
# generators per platform-ID/attribute-name:
_plat_attr_generators = {
# we used to have a couple here, but now none for the moment.
# An example would be:
# ('LJ01D', 'input_voltage'): _create_sine_generator(sine_period=30,
# gen_period=2.5,
# min_val=-500,
# max_val=+500),
}
# generators per attribute name:
_attribute_generators = {
'input_voltage':
_create_sine_generator(sine_period=30,
gen_period=2.5,
min_val=-500,
max_val=+500),
'input_bus_current':
_create_sine_generator(sine_period=50,
gen_period=5,
min_val=-300,
max_val=+300),
'MVPC_temperature':
_create_sine_generator(sine_period=20,
gen_period=4,
min_val=-200,
max_val=+200),
'MVPC_pressure_1':
_create_sine_generator(sine_period=20,
gen_period=4,
min_val=-100,
max_val=+100),
}
_default_generator = _create_simple_generator(gen_period=5)
def generate_values(platform_id, attr_id, from_time, to_time):
"""
Generates synthetic values within a given time window (both ends are
inclusive). Times are NTP.
@param platform_id Platform ID
@param attr_id Attribute ID. Only the name part is considered. See OOIION-1551.
@param from_time lower limit of desired time window
@param to_time upper limit of desired time window
"""
# get the attribute name from the given ID:
separator = attr_id.rfind('|')
attr_name = attr_id[:separator] if separator >= 0 else attr_id
# try by platform/attribute:
if (platform_id, attr_name) in _plat_attr_generators:
gen = _plat_attr_generators[(platform_id, attr_name)]
# else: try by the attribute only:
elif attr_name in _attribute_generators:
gen = _attribute_generators[attr_name]
else:
gen = _default_generator
return gen(from_time, to_time)
if __name__ == "__main__": # pragma: no cover
# do not restrict the absolute from_time for this demo program:
_START_TIME = 0
import sys
if len(sys.argv) < 5:
print("""
USAGE:
oms_values.py platform_id attr_id delta_from delta_to
Generates values in window [curr_time + delta_from, curr_time + delta_to]
Example:
oms_values.py Node1A input_voltage -35 0
""")
exit()
cur_time = ntplib.system_to_ntp_time(time.time())
platform_id = sys.argv[1]
attr_id = sys.argv[2]
delta_from = float(sys.argv[3])
delta_to = float(sys.argv[4])
from_time = cur_time + delta_from
to_time = cur_time + delta_to
values = generate_values(platform_id, attr_id, from_time, to_time)
print("Generated %d values in time window [%s, %s]:" % (
len(values), from_time, to_time))
for n, (val, t) in enumerate(values):
print("\t%2d: %5.2f -> %+4.3f" % (n, t, val))
"""
$ bin/python ion/agents/platform/rsn/simulator/oms_values.py Node1A other_attr -35 0
Generated 7 values in time window [3561992754.4, 3561992789.4]:
0: 3561992755.00 -> +990000.000
1: 3561992760.00 -> +990001.000
2: 3561992765.00 -> +990002.000
3: 3561992770.00 -> +990003.000
4: 3561992775.00 -> +990004.000
5: 3561992780.00 -> +990005.000
6: 3561992785.00 -> +990006.000
$ bin/python ion/agents/platform/rsn/simulator/oms_values.py Node1A input_voltage -35 0
Generated 7 values in time window [3561992757.86, 3561992792.86]:
0: 3561992760.00 -> -0.000
1: 3561992765.00 -> +433.013
2: 3561992770.00 -> +433.013
3: 3561992775.00 -> +0.000
4: 3561992780.00 -> -433.013
5: 3561992785.00 -> -433.013
6: 3561992790.00 -> -0.000
"""
| bsd-2-clause |
elvandy/nltools | nltools/data/adjacency.py | 1 | 34227 | from __future__ import division
'''
This data class is for working with similarity/dissimilarity matrices
'''
__author__ = ["Luke Chang"]
__license__ = "MIT"
import os
import pandas as pd
import numpy as np
import six
from copy import deepcopy
from sklearn.metrics.pairwise import pairwise_distances
from sklearn.manifold import MDS
from sklearn.utils import check_random_state
from scipy.spatial.distance import squareform
from scipy.stats import ttest_1samp
import seaborn as sns
import matplotlib.pyplot as plt
from nltools.stats import (correlation_permutation,
one_sample_permutation,
two_sample_permutation,
summarize_bootstrap,
matrix_permutation)
from nltools.stats import regress as regression
from nltools.plotting import (plot_stacked_adjacency,
plot_silhouette)
from nltools.utils import (all_same,
attempt_to_import,
concatenate,
_bootstrap_apply_func)
from .design_matrix import Design_Matrix
from joblib import Parallel, delayed
# Optional dependencies
nx = attempt_to_import('networkx', 'nx')
MAX_INT = np.iinfo(np.int32).max
class Adjacency(object):
'''
Adjacency is a class to represent Adjacency matrices as a vector rather
than a 2-dimensional matrix. This makes it easier to perform data
manipulation and analyses.
Args:
data: pandas data instance or list of files
matrix_type: (str) type of matrix. Possible values include:
['distance','similarity','directed','distance_flat',
'similarity_flat','directed_flat']
Y: Pandas DataFrame of training labels
**kwargs: Additional keyword arguments
'''
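    # A minimal usage sketch (illustration only; ``dist_mat`` is assumed to be
    # a square numpy array of pairwise distances with zeros on the diagonal):
    #
    #     adj = Adjacency(dist_mat, matrix_type='distance')
    #     adj.data          # flattened upper triangle
    #     adj.squareform()  # back to the full square matrix
    #     adj.plot()        # heatmap of the square form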
def __init__(self, data=None, Y=None, matrix_type=None, labels=None,
**kwargs):
if matrix_type is not None:
if matrix_type.lower() not in ['distance','similarity','directed',
'distance_flat','similarity_flat',
'directed_flat']:
raise ValueError("matrix_type must be [None,'distance', "
"'similarity','directed','distance_flat', "
"'similarity_flat','directed_flat']")
if data is None:
self.data = np.array([])
self.matrix_type = 'empty'
self.is_single_matrix = np.nan
self.issymmetric = np.nan
elif isinstance(data, list):
if isinstance(data[0], Adjacency):
tmp = concatenate(data)
for item in ['data', 'matrix_type', 'Y','issymmetric']:
setattr(self, item, getattr(tmp,item))
else:
d_all = []; symmetric_all = []; matrix_type_all = []
for d in data:
data_tmp, issymmetric_tmp, matrix_type_tmp, _ = self._import_single_data(d, matrix_type=matrix_type)
d_all.append(data_tmp)
symmetric_all.append(issymmetric_tmp)
matrix_type_all.append(matrix_type_tmp)
if not all_same(symmetric_all):
raise ValueError('Not all matrices are of the same '
'symmetric type.')
if not all_same(matrix_type_all):
raise ValueError('Not all matrices are of the same matrix '
'type.')
self.data = np.array(d_all)
self.issymmetric = symmetric_all[0]
self.matrix_type = matrix_type_all[0]
self.is_single_matrix = False
else:
self.data, self.issymmetric, self.matrix_type, self.is_single_matrix = self._import_single_data(data, matrix_type=matrix_type)
if Y is not None:
if isinstance(Y, six.string_types):
if os.path.isfile(Y):
Y = pd.read_csv(Y, header=None, index_col=None)
if isinstance(Y, pd.DataFrame):
if self.data.shape[0] != len(Y):
raise ValueError("Y does not match the correct size of "
"data")
self.Y = Y
else:
raise ValueError("Make sure Y is a pandas data frame.")
else:
self.Y = pd.DataFrame()
if labels is not None:
if not isinstance(labels, (list, np.ndarray)):
raise ValueError( "Make sure labels is a list or numpy array.")
if self.is_single_matrix:
if len(labels) != self.square_shape()[0]:
raise ValueError('Make sure the length of labels matches the shape of data.')
self.labels = deepcopy(labels)
else:
if len(labels) != len(self):
if len(labels) != self.square_shape()[0]:
raise ValueError('Make sure length of labels either '
'matches the number of Adjacency '
'matrices or the size of a single '
'matrix.')
else:
self.labels = list(labels) * len(self)
else:
if np.all(np.array([len(x) for x in labels]) !=self.square_shape()[0]):
raise ValueError("All lists of labels must be same length as shape of data.")
self.labels = deepcopy(labels)
else:
self.labels = None
def __repr__(self):
return ("%s.%s(shape=%s, square_shape=%s, Y=%s, is_symmetric=%s,"
"matrix_type=%s)") % (
self.__class__.__module__,
self.__class__.__name__,
self.shape(),
self.square_shape(),
len(self.Y),
self.issymmetric,
self.matrix_type)
def __getitem__(self,index):
new = self.copy()
if isinstance(index, int):
new.data = np.array(self.data[index, :]).flatten()
new.is_single_matrix = True
else:
new.data = np.array(self.data[index, :])
if not self.Y.empty:
new.Y = self.Y.iloc[index]
return new
def __len__(self):
if self.is_single_matrix:
return 1
else:
return self.data.shape[0]
def __iter__(self):
for x in range(len(self)):
yield self[x]
def __add__(self, y):
new = deepcopy(self)
if isinstance(y, (int, float)):
new.data = new.data + y
if isinstance(y, Adjacency):
if self.shape() != y.shape():
raise ValueError('Both Adjacency() instances need to be the '
'same shape.')
new.data = new.data + y.data
return new
def __sub__(self, y):
new = deepcopy(self)
if isinstance(y, (int, float)):
new.data = new.data - y
if isinstance(y, Adjacency):
if self.shape() != y.shape():
raise ValueError('Both Adjacency() instances need to be the '
'same shape.')
new.data = new.data - y.data
return new
def __mul__(self, y):
new = deepcopy(self)
if isinstance(y, (int, float)):
new.data = new.data * y
if isinstance(y, Adjacency):
if self.shape() != y.shape():
raise ValueError('Both Adjacency() instances need to be the '
'same shape.')
new.data = np.multiply(new.data, y.data)
return new
def _import_single_data(self, data, matrix_type=None):
''' Helper function to import single data matrix.'''
if isinstance(data, six.string_types):
if os.path.isfile(data):
data = pd.read_csv(data)
else:
raise ValueError('Make sure you have specified a valid file '
'path.')
def test_is_single_matrix(data):
if len(data.shape) == 1:
return True
else:
return False
if matrix_type is not None:
if matrix_type.lower() == 'distance_flat':
matrix_type = 'distance'
data = np.array(data)
issymmetric = True
is_single_matrix = test_is_single_matrix(data)
elif matrix_type.lower() == 'similarity_flat':
matrix_type = 'similarity'
data = np.array(data)
issymmetric = True
is_single_matrix = test_is_single_matrix(data)
elif matrix_type.lower() == 'directed_flat':
matrix_type = 'directed'
data = np.array(data).flatten()
issymmetric = False
is_single_matrix = test_is_single_matrix(data)
elif matrix_type.lower() in ['distance', 'similarity', 'directed']:
if data.shape[0] != data.shape[1]:
raise ValueError('Data matrix must be square')
data = np.array(data)
matrix_type = matrix_type.lower()
if matrix_type in ['distance', 'similarity']:
issymmetric = True
data = data[np.triu_indices(data.shape[0], k=1)]
else:
issymmetric = False
if isinstance(data, pd.DataFrame):
data = data.values.flatten()
elif isinstance(data, np.ndarray):
data = data.flatten()
is_single_matrix = True
else:
if len(data.shape) == 1: # Single Vector
try:
data = squareform(data)
except ValueError:
print('Data is not flattened upper triangle from '
'similarity/distance matrix or flattened directed '
'matrix.')
is_single_matrix = True
elif data.shape[0] == data.shape[1]: # Square Matrix
is_single_matrix = True
else: # Rectangular Matrix
data_all = deepcopy(data)
try:
data = squareform(data_all[0, :])
except ValueError:
print('Data is not flattened upper triangle from multiple '
'similarity/distance matrices or flattened directed '
'matrices.')
is_single_matrix = False
# Test if matrix is symmetrical
if np.all(data[np.triu_indices(data.shape[0], k=1)] == data.T[np.triu_indices(data.shape[0], k=1)]):
issymmetric = True
else:
issymmetric = False
# Determine matrix type
if issymmetric:
if np.sum(np.diag(data)) == 0:
matrix_type = 'distance'
elif np.sum(np.diag(data)) == data.shape[0]:
matrix_type = 'similarity'
data = data[np.triu_indices(data.shape[0], k=1)]
else:
matrix_type = 'directed'
data = data.flatten()
if not is_single_matrix:
data = data_all
return (data, issymmetric, matrix_type, is_single_matrix)
def isempty(self):
'''Check if Adjacency object is empty'''
        return self.matrix_type == 'empty'
def squareform(self):
'''Convert adjacency back to squareform'''
if self.issymmetric:
if self.is_single_matrix:
return squareform(self.data)
else:
return [squareform(x.data) for x in self]
else:
if self.is_single_matrix:
return self.data.reshape(int(np.sqrt(self.data.shape[0])),
int(np.sqrt(self.data.shape[0])))
else:
return [x.data.reshape(int(np.sqrt(x.data.shape[0])),
int(np.sqrt(x.data.shape[0]))) for x in self]
def plot(self, limit=3, *args, **kwargs):
''' Create Heatmap of Adjacency Matrix'''
if self.is_single_matrix:
f, a = plt.subplots(nrows=1, figsize=(7, 5))
if self.labels is None:
sns.heatmap(self.squareform(), square=True, ax=a,
*args, **kwargs)
else:
sns.heatmap(self.squareform(), square=True, ax=a,
xticklabels=self.labels,
yticklabels=self.labels,
*args, **kwargs)
else:
n_subs = np.minimum(len(self), limit)
f, a = plt.subplots(nrows=n_subs, figsize=(7, len(self)*5))
if self.labels is None:
for i in range(n_subs):
sns.heatmap(self[i].squareform(), square=True, ax=a[i],
*args, **kwargs)
else:
for i in range(n_subs):
sns.heatmap(self[i].squareform(), square=True,
xticklabels=self.labels[i],
yticklabels=self.labels[i],
ax=a[i], *args, **kwargs)
return f
def mean(self, axis=0):
''' Calculate mean of Adjacency
Args:
axis: calculate mean over features (0) or data (1).
For data it will be on upper triangle.
Returns:
mean: float if single, adjacency if axis=0, np.array if axis=1
and multiple
'''
if self.is_single_matrix:
return np.mean(self.data)
else:
if axis == 0:
return Adjacency(data=np.mean(self.data, axis=axis),
matrix_type=self.matrix_type + '_flat')
elif axis == 1:
return np.mean(self.data, axis=axis)
def std(self, axis=0):
''' Calculate standard deviation of Adjacency
Args:
axis: calculate std over features (0) or data (1).
For data it will be on upper triangle.
Returns:
std: float if single, adjacency if axis=0, np.array if axis=1 and
multiple
'''
if self.is_single_matrix:
return np.std(self.data)
else:
if axis == 0:
return Adjacency(data=np.std(self.data, axis=axis),
matrix_type=self.matrix_type + '_flat')
elif axis == 1:
return np.std(self.data, axis=axis)
def shape(self):
''' Calculate shape of data. '''
return self.data.shape
def square_shape(self):
''' Calculate shape of squareform data. '''
        if self.matrix_type == 'empty':
return np.array([])
else:
if self.is_single_matrix:
return self.squareform().shape
else:
return self[0].squareform().shape
def copy(self):
''' Create a copy of Adjacency object.'''
return deepcopy(self)
def append(self, data):
''' Append data to Adjacency instance
Args:
data: Adjacency instance to append
Returns:
out: new appended Adjacency instance
'''
if not isinstance(data, Adjacency):
raise ValueError('Make sure data is a Adjacency instance.')
if self.isempty():
out = data.copy()
else:
out = self.copy()
if self.square_shape() != data.square_shape():
raise ValueError('Data is not the same shape as Adjacency '
'instance.')
out.data = np.vstack([self.data, data.data])
out.is_single_matrix = False
if out.Y.size:
out.Y = self.Y.append(data.Y)
return out
def write(self, file_name, method='long'):
''' Write out Adjacency object to csv file.
Args:
file_name (str): name of file name to write
method (str): method to write out data ['long','square']
'''
if method not in ['long', 'square']:
raise ValueError('Make sure method is ["long","square"].')
        if self.is_single_matrix:
            if method == 'long':
                out = pd.DataFrame(self.data).to_csv(file_name, index=None)
            elif method == 'square':
                out = pd.DataFrame(self.squareform()).to_csv(file_name,
                                                            index=None)
        else:
            if method == 'long':
                out = pd.DataFrame(self.data).to_csv(file_name, index=None)
            elif method == 'square':
                raise NotImplementedError('Need to decide how we should write '
                                        'out multiple matrices. As separate '
                                        'files?')
def similarity(self, data, plot=False, perm_type='2d', n_permute=5000, metric='spearman', **kwargs):
''' Calculate similarity between two Adjacency matrices.
Default is to use spearman correlation and permutation test.
Args:
data: Adjacency data, or 1-d array same size as self.data
perm_type: '1d','2d', or None
metric: 'spearman','pearson','kendall'
'''
if not isinstance(data, Adjacency):
data2 = Adjacency(data)
else:
data2 = data.copy()
if perm_type is None:
n_permute=0
similarity_func = correlation_permutation
elif perm_type == '1d':
similarity_func = correlation_permutation
elif perm_type == '2d':
similarity_func = matrix_permutation
if self.is_single_matrix:
if plot:
plot_stacked_adjacency(self, data)
return similarity_func(self.data, data2.data, metric=metric, n_permute=n_permute, **kwargs)
else:
if plot:
_, a = plt.subplots(len(self))
for i in a:
plot_stacked_adjacency(self, data, ax=i)
return [similarity_func(x.data, data2.data, metric=metric, n_permute=n_permute, **kwargs) for x in self]
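    # Hedged usage sketch for similarity(); the variable names are illustrative
    # and the exact keys of the returned stats depend on the permutation helper:
    #   stats = dat.similarity(other_dat, metric='spearman', perm_type='2d',
    #                          n_permute=1000)
    # With perm_type='2d' the test permutes at the matrix level (matrix_permutation);
    # with '1d' or None it falls back to correlation_permutation on the flattened data.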
def distance(self, method='correlation', **kwargs):
''' Calculate distance between images within an Adjacency() instance.
Args:
method: type of distance metric (can use any scikit learn or
sciypy metric)
Returns:
dist: Outputs a 2D distance matrix.
'''
return Adjacency(pairwise_distances(self.data, metric=method, **kwargs),
matrix_type='distance')
def threshold(self, upper=None, lower=None, binarize=False):
'''Threshold Adjacency instance. Provide upper and lower values or
percentages to perform two-sided thresholding. Binarize will return
a mask image respecting thresholds if provided, otherwise respecting
every non-zero value.
Args:
upper: (float or str) Upper cutoff for thresholding. If string
will interpret as percentile; can be None for one-sided
thresholding.
lower: (float or str) Lower cutoff for thresholding. If string
will interpret as percentile; can be None for one-sided
thresholding.
binarize (bool): return binarized image respecting thresholds if
provided, otherwise binarize on every non-zero value;
default False
Returns:
Adjacency: thresholded Adjacency instance
'''
b = self.copy()
if isinstance(upper, six.string_types):
            if upper[-1] == '%':
upper = np.percentile(b.data, float(upper[:-1]))
if isinstance(lower, six.string_types):
            if lower[-1] == '%':
lower = np.percentile(b.data, float(lower[:-1]))
if upper and lower:
b.data[(b.data < upper) & (b.data > lower)] = 0
elif upper and not lower:
b.data[b.data < upper] = 0
elif lower and not upper:
b.data[b.data > lower] = 0
if binarize:
b.data[b.data != 0] = 1
return b
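    # Minimal usage sketch for threshold(); values and names below are made up:
    #   strong = dat.threshold(upper='95%', binarize=True)  # keep the top ~5% of edges
    #   band = dat.threshold(upper=.8, lower=.2)            # zero values strictly between .2 and .8
    # Percentile strings are resolved with np.percentile, as in the code above.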
def to_graph(self):
''' Convert Adjacency into networkx graph. only works on
single_matrix for now.'''
if self.is_single_matrix:
if self.matrix_type == 'directed':
G = nx.DiGraph(self.squareform())
else:
G = nx.Graph(self.squareform())
if self.labels is not None:
labels = {x:y for x,y in zip(G.nodes,self.labels)}
nx.relabel_nodes(G, labels, copy=False)
return G
else:
raise NotImplementedError('This function currently only works on '
'single matrices.')
def ttest(self, permutation=False, **kwargs):
''' Calculate ttest across samples.
Args:
permutation: (bool) Run ttest as permutation. Note this can be very slow.
Returns:
out: (dict) contains Adjacency instances of t values (or mean if
running permutation) and Adjacency instance of p values.
'''
if self.is_single_matrix:
raise ValueError('t-test cannot be run on single matrices.')
if permutation:
t = []; p = []
for i in range(self.data.shape[1]):
stats = one_sample_permutation(self.data[:, i], **kwargs)
t.append(stats['mean'])
p.append(stats['p'])
t = Adjacency(np.array(t))
p = Adjacency(np.array(p))
else:
t = self.mean().copy()
p = deepcopy(t)
t.data, p.data = ttest_1samp(self.data, 0, 0)
return {'t': t, 'p':p}
def plot_label_distance(self, labels=None, ax=None):
''' Create a violin plot indicating within and between label distance
Args:
labels (np.array): numpy array of labels to plot
Returns:
violin plot handles
'''
if not self.is_single_matrix:
raise ValueError('This function only works on single adjacency '
'matrices.')
distance = pd.DataFrame(self.squareform())
if labels is None:
labels = np.array(deepcopy(self.labels))
else:
if len(labels) != distance.shape[0]:
raise ValueError('Labels must be same length as distance matrix')
out = pd.DataFrame(columns=['Distance', 'Group', 'Type'], index=None)
for i in np.unique(labels):
tmp_w = pd.DataFrame(columns=out.columns, index=None)
tmp_w['Distance'] = distance.loc[labels == i, labels == i].values[np.triu_indices(sum(labels == i), k=1)]
tmp_w['Type'] = 'Within'
tmp_w['Group'] = i
tmp_b = pd.DataFrame(columns=out.columns, index=None)
tmp_b['Distance'] = distance.loc[labels != i, labels != i].values[np.triu_indices(sum(labels == i), k=1)]
tmp_b['Type'] = 'Between'
tmp_b['Group'] = i
out = out.append(tmp_w).append(tmp_b)
f = sns.violinplot(x="Group", y="Distance", hue="Type", data=out, split=True, inner='quartile',
palette={"Within": "lightskyblue", "Between": "red"}, ax=ax)
f.set_ylabel('Average Distance')
f.set_title('Average Group Distance')
return f
def stats_label_distance(self, labels=None, n_permute=5000, n_jobs=-1):
''' Calculate permutation tests on within and between label distance.
Args:
labels (np.array): numpy array of labels to plot
n_permute (int): number of permutations to run (default=5000)
Returns:
dict: dictionary of within and between group differences
and p-values
'''
if not self.is_single_matrix:
raise ValueError('This function only works on single adjacency '
'matrices.')
distance = pd.DataFrame(self.squareform())
        if labels is None:
            labels = np.array(deepcopy(self.labels))
else:
if len(labels) != distance.shape[0]:
raise ValueError('Labels must be same length as distance matrix')
out = pd.DataFrame(columns=['Distance', 'Group', 'Type'], index=None)
for i in np.unique(labels):
tmp_w = pd.DataFrame(columns=out.columns, index=None)
tmp_w['Distance'] = distance.loc[labels == i, labels == i].values[np.triu_indices(sum(labels == i), k=1)]
tmp_w['Type'] = 'Within'
tmp_w['Group'] = i
tmp_b = pd.DataFrame(columns=out.columns, index=None)
tmp_b['Distance'] = distance.loc[labels == i, labels != i].values.flatten()
tmp_b['Type'] = 'Between'
tmp_b['Group'] = i
out = out.append(tmp_w).append(tmp_b)
stats = dict()
for i in np.unique(labels):
# Within group test
tmp1 = out.loc[(out['Group'] == i) & (out['Type'] == 'Within'), 'Distance']
tmp2 = out.loc[(out['Group'] == i) & (out['Type'] == 'Between'), 'Distance']
stats[str(i)] = two_sample_permutation(tmp1, tmp2,
n_permute=n_permute, n_jobs=n_jobs)
return stats
def plot_silhouette(self, labels=None, ax=None, permutation_test=True,
n_permute=5000, **kwargs):
'''Create a silhouette plot'''
distance = pd.DataFrame(self.squareform())
if labels is None:
labels = np.array(deepcopy(self.labels))
else:
if len(labels) != distance.shape[0]:
raise ValueError('Labels must be same length as distance matrix')
        (f, outAll) = plot_silhouette(distance, labels, ax=ax,
                                      permutation_test=permutation_test,
                                      n_permute=n_permute, **kwargs)
return (f,outAll)
def bootstrap(self, function, n_samples=5000, save_weights=False,
n_jobs=-1, random_state=None, *args, **kwargs):
'''Bootstrap an Adjacency method.
        Example Usage:
b = dat.bootstrap('mean', n_samples=5000)
b = dat.bootstrap('predict', n_samples=5000, algorithm='ridge')
b = dat.bootstrap('predict', n_samples=5000, save_weights=True)
Args:
function: (str) method to apply to data for each bootstrap
n_samples: (int) number of samples to bootstrap with replacement
save_weights: (bool) Save each bootstrap iteration
(useful for aggregating many bootstraps on a cluster)
n_jobs: (int) The number of CPUs to use to do the computation.
                -1 means all CPUs.
        Returns:
output: summarized studentized bootstrap output
'''
random_state = check_random_state(random_state)
seeds = random_state.randint(MAX_INT, size=n_samples)
bootstrapped = Parallel(n_jobs=n_jobs)(
delayed(_bootstrap_apply_func)(self,
function, random_state=seeds[i], *args, **kwargs)
for i in range(n_samples))
bootstrapped = Adjacency(bootstrapped)
return summarize_bootstrap(bootstrapped, save_weights=save_weights)
def plot_mds(self, n_components=2, metric=True, labels_color=None,
cmap=plt.cm.hot_r, n_jobs=-1, view=(30, 20),
figsize = [12,8], ax = None, *args, **kwargs):
''' Plot Multidimensional Scaling
Args:
n_components: (int) Number of dimensions to project (can be 2 or 3)
metric: (bool) Perform metric or non-metric dimensional scaling; default
labels_color: (str) list of colors for labels, if len(1) then make all same color
n_jobs: (int) Number of parallel jobs
view: (tuple) view for 3-Dimensional plot; default (30,20)
Returns:
fig: returns matplotlib figure
'''
if self.matrix_type != 'distance':
raise ValueError("MDS only works on distance matrices.")
if not self.is_single_matrix:
raise ValueError("MDS only works on single matrices.")
if n_components not in [2,3]:
raise ValueError('Cannot plot {0}-d image'.format(n_components))
if labels_color is not None:
if self.labels is None:
raise ValueError("Make sure that Adjacency object has labels specified.")
if len(self.labels) != len(labels_color):
raise ValueError("Length of labels_color must match self.labels.")
# Run MDS
mds = MDS(n_components=n_components, metric=metric, n_jobs=n_jobs,
dissimilarity="precomputed", *args, **kwargs)
proj = mds.fit_transform(self.squareform())
# Create Plot
        returnFig = False
        if ax is None:  # Create axis
            returnFig = True
fig = plt.figure(figsize=figsize)
if n_components == 3:
ax = fig.add_subplot(111, projection='3d')
ax.view_init(*view)
elif n_components == 2:
ax = fig.add_subplot(111)
# Plot dots
if n_components == 3:
ax.scatter(proj[:, 0], proj[:, 1], proj[:, 2], s=1, c='k')
elif n_components == 2:
ax.scatter(proj[:, 0], proj[:, 1], s=1, c='k')
# Plot labels
if labels_color is None:
labels_color = ['black'] * len(self.labels)
if n_components == 3:
for ((x, y, z), label, color) in zip(proj, self.labels, labels_color):
ax.text(x, y, z, label, color='white', #color,
bbox=dict(facecolor=color, alpha=1, boxstyle="round,pad=0.3"))
else:
for ((x, y), label, color) in zip(proj, self.labels, labels_color):
ax.text(x, y, label, color='white', #color,
bbox=dict(facecolor=color, alpha=1, boxstyle="round,pad=0.3"))
ax.xaxis.set_visible(False)
ax.yaxis.set_visible(False)
if returnFig:
return fig
def distance_to_similarity(self, beta=1):
'''Convert distance matrix to similarity matrix
Args:
beta: parameter to scale exponential function (default: 1)
Returns:
Adjacency object
'''
if self.matrix_type == 'distance':
return Adjacency(np.exp(-beta*self.squareform()/self.squareform().std()),
labels=self.labels, matrix_type='similarity')
else:
raise ValueError('Matrix is not a distance matrix.')
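    # Worked illustration of the conversion above (numbers are assumptions, not
    # library output): with beta=1, a distance of 0 maps to exp(0) = 1.0, while a
    # distance of one standard deviation maps to exp(-1) ~= 0.37, so smaller
    # distances become similarities closer to 1.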
def similarity_to_distance(self):
'''Convert similarity matrix to distance matrix'''
if self.matrix_type == 'similarity':
return Adjacency(1-self.squareform(),
labels=self.labels, matrix_type='distance')
else:
raise ValueError('Matrix is not a similarity matrix.')
def within_cluster_mean(self, clusters = None):
''' This function calculates mean within cluster labels
Args:
clusters: list of cluster labels
Returns:
dict: within cluster means
'''
distance=pd.DataFrame(self.squareform())
clusters = np.array(clusters)
if len(clusters) != distance.shape[0]:
raise ValueError('Cluster labels must be same length as distance matrix')
        out = {}
for i in list(set(clusters)):
out[i] = np.mean(distance.loc[clusters==i,clusters==i].values[np.triu_indices(sum(clusters==i),k=1)])
return out
def regress(self, X, mode='ols', **kwargs):
''' Run a regression on an adjacency instance.
You can decompose an adjacency instance with another adjacency instance.
You can also decompose each pixel by passing a design_matrix instance.
Args:
X: Design matrix can be an Adjacency or Design_Matrix instance
method: type of regression (default: ols)
Returns:
'''
stats = {}
if isinstance(X, Adjacency):
if X.square_shape()[0] != self.square_shape()[0]:
raise ValueError('Adjacency instances must be the same size.')
b,t,p,_,res = regression(X.data.T, self.data, mode=mode, **kwargs)
stats['beta'],stats['t'],stats['p'],stats['residual'] = (b,t,p,res)
elif isinstance(X, Design_Matrix):
if X.shape[0] != len(self):
raise ValueError('Design matrix must have same number of observations as Adjacency')
b,t,p,df,res = regression(X, self.data, mode=mode, **kwargs)
stats['beta'], stats['t'], stats['p'] = [x for x in self[:3]]
stats['beta'].data, stats['t'].data, stats['p'].data = b.squeeze(), t.squeeze(), p.squeeze()
stats['residual'] = self.copy()
stats['residual'].data = res
else:
raise ValueError('X must be a Design_Matrix or Adjacency Instance.')
return stats
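    # Hedged usage sketch for regress(); identifiers are hypothetical:
    #   stats = dat.regress(predictor_adjacency, mode='ols')
    #   betas, tvals, pvals = stats['beta'], stats['t'], stats['p']
    # or, with a Design_Matrix whose rows match the number of matrices in dat:
    #   stats = dat.regress(design_matrix)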
| mit |
tmhm/scikit-learn | examples/svm/plot_weighted_samples.py | 188 | 1943 | """
=====================
SVM: Weighted samples
=====================
Plot decision function of a weighted dataset, where the size of points
is proportional to its weight.
The sample weighting rescales the C parameter, which means that the classifier
puts more emphasis on getting these points right. The effect might often be
subtle.
To emphasize the effect here, we particularly weight outliers, making the
deformation of the decision boundary very visible.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm
def plot_decision_function(classifier, sample_weight, axis, title):
# plot the decision function
xx, yy = np.meshgrid(np.linspace(-4, 5, 500), np.linspace(-4, 5, 500))
Z = classifier.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
# plot the line, the points, and the nearest vectors to the plane
axis.contourf(xx, yy, Z, alpha=0.75, cmap=plt.cm.bone)
axis.scatter(X[:, 0], X[:, 1], c=Y, s=100 * sample_weight, alpha=0.9,
cmap=plt.cm.bone)
axis.axis('off')
axis.set_title(title)
# we create 20 points
np.random.seed(0)
X = np.r_[np.random.randn(10, 2) + [1, 1], np.random.randn(10, 2)]
Y = [1] * 10 + [-1] * 10
sample_weight_last_ten = abs(np.random.randn(len(X)))
sample_weight_constant = np.ones(len(X))
# and bigger weights to some outliers
sample_weight_last_ten[15:] *= 5
sample_weight_last_ten[9] *= 15
# fit the model with sample weights and, for reference, without them
clf_weights = svm.SVC()
clf_weights.fit(X, Y, sample_weight=sample_weight_last_ten)
clf_no_weights = svm.SVC()
clf_no_weights.fit(X, Y)
fig, axes = plt.subplots(1, 2, figsize=(14, 6))
plot_decision_function(clf_no_weights, sample_weight_constant, axes[0],
"Constant weights")
plot_decision_function(clf_weights, sample_weight_last_ten, axes[1],
"Modified weights")
plt.show()
| bsd-3-clause |
ubic135/odoo-design | addons/mrp/wizard/change_production_qty.py | 245 | 4852 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
class change_production_qty(osv.osv_memory):
_name = 'change.production.qty'
_description = 'Change Quantity of Products'
_columns = {
'product_qty': fields.float('Product Qty', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
}
def default_get(self, cr, uid, fields, context=None):
""" To get default values for the object.
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param fields: List of fields for which we want default values
@param context: A standard dictionary
@return: A dictionary which of fields with values.
"""
if context is None:
context = {}
res = super(change_production_qty, self).default_get(cr, uid, fields, context=context)
prod_obj = self.pool.get('mrp.production')
prod = prod_obj.browse(cr, uid, context.get('active_id'), context=context)
if 'product_qty' in fields:
res.update({'product_qty': prod.product_qty})
return res
def _update_product_to_produce(self, cr, uid, prod, qty, context=None):
move_lines_obj = self.pool.get('stock.move')
for m in prod.move_created_ids:
move_lines_obj.write(cr, uid, [m.id], {'product_uom_qty': qty})
def change_prod_qty(self, cr, uid, ids, context=None):
"""
Changes the Quantity of Product.
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param ids: List of IDs selected
@param context: A standard dictionary
@return:
"""
record_id = context and context.get('active_id',False)
assert record_id, _('Active Id not found')
prod_obj = self.pool.get('mrp.production')
bom_obj = self.pool.get('mrp.bom')
move_obj = self.pool.get('stock.move')
for wiz_qty in self.browse(cr, uid, ids, context=context):
prod = prod_obj.browse(cr, uid, record_id, context=context)
prod_obj.write(cr, uid, [prod.id], {'product_qty': wiz_qty.product_qty})
prod_obj.action_compute(cr, uid, [prod.id])
for move in prod.move_lines:
bom_point = prod.bom_id
bom_id = prod.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, product_id=prod.product_id.id, context=context)
if not bom_id:
raise osv.except_osv(_('Error!'), _("Cannot find bill of material for this product."))
prod_obj.write(cr, uid, [prod.id], {'bom_id': bom_id})
bom_point = bom_obj.browse(cr, uid, [bom_id])[0]
if not bom_id:
raise osv.except_osv(_('Error!'), _("Cannot find bill of material for this product."))
factor = prod.product_qty * prod.product_uom.factor / bom_point.product_uom.factor
product_details, workcenter_details = \
bom_obj._bom_explode(cr, uid, bom_point, prod.product_id, factor / bom_point.product_qty, [], context=context)
for r in product_details:
if r['product_id'] == move.product_id.id:
move_obj.write(cr, uid, [move.id], {'product_uom_qty': r['product_qty']})
if prod.move_prod_id:
move_obj.write(cr, uid, [prod.move_prod_id.id], {'product_uom_qty' : wiz_qty.product_qty})
self._update_product_to_produce(cr, uid, prod, wiz_qty.product_qty, context=context)
return {}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
solintegra/addons | mrp_project_link/models/project_project.py | 16 | 2484 | # -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, fields, api
class ProjectProject(models.Model):
_inherit = 'project.project'
@api.one
def _project_shortcut_count(self):
production_obj = self.env['mrp.production']
procurement_obj = self.env['procurement.order']
productions = production_obj.search([('project_id', '=', self.id)])
procurements = procurement_obj.search([('main_project_id', '=',
self.id)])
self.production_count = len(productions)
self.procurement_count = len(procurements)
production_count = fields.Integer(string='Manufacturing Count',
compute=_project_shortcut_count)
procurement_count = fields.Integer(string='Procurement Count',
compute=_project_shortcut_count)
class Task(models.Model):
_inherit = "project.task"
mrp_production_id = fields.Many2one('mrp.production',
string='Manufacturing Order')
mrp_sch_products = fields.One2many(
"mrp.production.product.line", 'task_id',
related='mrp_production_id.product_lines', store=False,
string='Scheduled Products')
wk_sch_products = fields.One2many(
"mrp.production.product.line", 'task_id',
related='wk_order.product_line', store=False,
string='Scheduled Products')
final_product = fields.Many2one('product.product',
string='Product to Produce', store=False,
related='mrp_production_id.product_id')
| agpl-3.0 |
themiurgo/csvkit | csvkit/utilities/in2csv.py | 20 | 3537 | #!/usr/bin/env python
from csvkit import convert
from csvkit.cli import CSVKitUtility
class In2CSV(CSVKitUtility):
description = 'Convert common, but less awesome, tabular data formats to CSV.'
epilog='Some command line flags only pertain to specific input formats.'
override_flags = ['f']
def add_arguments(self):
self.argparser.add_argument(metavar="FILE", nargs='?', dest='input_path',
help='The CSV file to operate on. If omitted, will accept input on STDIN.')
self.argparser.add_argument('-f', '--format', dest='filetype',
help='The format of the input file. If not specified will be inferred from the file type. Supported formats: %s.' % ', '.join(sorted(convert.SUPPORTED_FORMATS)))
self.argparser.add_argument('-s', '--schema', dest='schema',
help='Specifies a CSV-formatted schema file for converting fixed-width files. See documentation for details.')
self.argparser.add_argument('-k', '--key', dest='key',
help='Specifies a top-level key to use look within for a list of objects to be converted when processing JSON.')
self.argparser.add_argument('-y', '--snifflimit', dest='snifflimit', type=int,
help='Limit CSV dialect sniffing to the specified number of bytes. Specify "0" to disable sniffing entirely.')
self.argparser.add_argument('--sheet', dest='sheet',
help='The name of the XLSX sheet to operate on.')
self.argparser.add_argument('--no-inference', dest='no_inference', action='store_true',
help='Disable type inference when parsing the input.')
def main(self):
if self.args.filetype:
filetype = self.args.filetype
if filetype not in convert.SUPPORTED_FORMATS:
self.argparser.error('"%s" is not a supported format' % self.args.filetype)
elif self.args.schema:
filetype = 'fixed'
elif self.args.key:
filetype = 'json'
else:
if not self.args.input_path or self.args.input_path == '-':
self.argparser.error('You must specify a format when providing data via STDIN (pipe).')
filetype = convert.guess_format(self.args.input_path)
if not filetype:
self.argparser.error('Unable to automatically determine the format of the input file. Try specifying a format with --format.')
if filetype in ('xls', 'xlsx'):
self.input_file = open(self.args.input_path, 'rb')
else:
self.input_file = self._open_input_file(self.args.input_path)
kwargs = self.reader_kwargs
if self.args.schema:
kwargs['schema'] = self._open_input_file(self.args.schema)
if self.args.key:
kwargs['key'] = self.args.key
if self.args.snifflimit:
kwargs['snifflimit'] = self.args.snifflimit
if self.args.sheet:
kwargs['sheet'] = self.args.sheet
if self.args.no_inference:
kwargs['type_inference'] = False
if filetype == 'csv' and self.args.no_header_row:
kwargs['no_header_row'] = True
# Fixed width can be processed as a stream
if filetype == 'fixed':
kwargs['output'] = self.output_file
data = convert.convert(self.input_file, filetype, **kwargs)
self.output_file.write(data)
def launch_new_instance():
utility = In2CSV()
utility.main()
if __name__ == "__main__":
launch_new_instance()
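# Hedged command-line sketch of this utility (file names are illustrative):
#   in2csv data.xlsx --sheet "Sheet1" > data.csv
#   in2csv --format fixed --schema schema.csv fixed-width.txt > out.csv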
| mit |
sjl767/woo | py/tests/grid.py | 1 | 3704 | # encoding: utf-8
# 2013 © Václav Šmilauer <[email protected]>
import unittest
from minieigen import *
import woo._customConverters
import woo.dem
class TestGridStore(unittest.TestCase):
def setUp(self):
self.gs=woo.dem.GridStore(gridSize=(5,6,7),cellLen=4,denseLock=True,exNumMaps=4)
self.ijk=Vector3i(2,3,4)
def testEx(self):
'Grid: storage: dense and extra data'
for i in range(0,10): self.gs.append(self.ijk,i)
self.assert_(self.gs[self.ijk]==list(range(0,10)))
dense,extra=self.gs._rawData(self.ijk)
# print self.gs[self.ijk],dense,extra
self.assert_(dense==[10,0,1,2,3])
self.assert_(extra[:6]==[4,5,6,7,8,9])
self.assert_(self.gs.countEx()=={tuple(self.ijk):6})
def testAppend(self):
'Grid: storage: appending data'
for i in range(0,13):
self.gs.append(self.ijk,i)
self.assert_(i==self.gs[self.ijk][self.gs.size(self.ijk)-1])
def testStorageOrder(self):
'Grid: storage: storage order'
self.assert_(self.gs.lin2ijk(1)==(0,0,1)) # last varies the fastest
self.assert_(self.gs.ijk2lin((0,0,1))==1)
def testPyAcces(self):
'Grid: storage: python access'
self.gs[self.ijk]=range(0,10)
self.assert_(self.gs[self.ijk]==list(range(0,10)))
self.assert_(self.gs.countEx()=={tuple(self.ijk):6})
del self.gs[self.ijk]
self.assert_(self.gs.countEx()=={})
self.assert_(self.gs.size(self.ijk)==0)
self.assert_(self.gs[self.ijk]==[])
def testComplement(self):
'Grid: storage: complements'
# make insignificant parameters different
g1=woo.dem.GridStore(gridSize=(3,3,3),cellLen=2,denseLock=False,exNumMaps=4)
g2=woo.dem.GridStore(gridSize=(3,3,3),cellLen=3,denseLock=True,exNumMaps=2)
c1,c2,c3,c4=(1,1,1),(2,2,2),(2,1,2),(1,2,1)
g1[c1]=[0,1]; g2[c1]=[1,2] # mixed scenario
g1[c2]=[1,2,3]; g2[c2]=[] # b is empty (cheaper copy branch)
g2[c3]=[]; g2[c3]=[1,2,3] # a is empty (cheaper copy branch)
g2[c4]=[]; g2[c4]=[]
# incompatible dimensions
self.assertRaises(RuntimeError,lambda: g1.complements(woo.dem.GridStore(gridSize=(2,2,2))))
        # setMinSize determines when boost::range::set_difference is used and when a naive traversal is used instead (presumably faster for very small sets)
for setMinSize in (0,1,2,3):
g12,g21=g1.complements(g2,setMinSize=setMinSize)
if 0:
print setMinSize,'g1',g1[c1],g1[c2],g1[c3],g1[c4]
print setMinSize,'g2',g2[c1],g2[c2],g2[c3],g2[c4]
print setMinSize,'g12',g12[c1],g12[c2],g12[c3],g12[c4]
print setMinSize,'g21',g21[c1],g21[c2],g21[c3],g12[c4]
self.assert_(g12[c1]==[0])
self.assert_(g21[c1]==[2])
self.assert_(g12[c2]==[1,2,3])
self.assert_(g21[c2]==[])
self.assert_(g12[c3]==[])
self.assert_(g21[c3]==[1,2,3])
self.assert_(g21[c4]==[])
self.assert_(g12[c4]==[])
class TestGridColliderBasics(unittest.TestCase):
def testParams(self):
'GridCollider: used-definable parameters'
gc=woo.dem.GridCollider()
gc.domain=((0,0,0),(1,1,1))
gc.minCellSize=.1
self.assert_(gc.dim==Vector3i(10,10,10))
self.assertAlmostEqual(gc.cellSize[0],.1)
self.assertRaises(RuntimeError,lambda: setattr(gc,'minCellSize',0))
gc.minCellSize=.1
self.assertRaises(RuntimeError,lambda: setattr(gc,'domain',((0,0,0),(0,0,0))))
self.assertRaises(RuntimeError,lambda: setattr(gc,'domain',((0,0,0),(-1,-1,-1))))
| gpl-2.0 |
hazrpg/calibre | src/odf/easyliststyle.py | 95 | 3771 | # -*- coding: utf-8 -*-
# Create a <text:list-style> element from a text string.
# Copyright (C) 2008 J. David Eisenberg
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Contributor(s):
#
import re
from style import Style, TextProperties, ListLevelProperties
from text import ListStyle,ListLevelStyleNumber,ListLevelStyleBullet
"""
Create a <text:list-style> element from a string or array.
List styles require a lot of code to create one level at a time.
These routines take a string and delimiter, or a list of
strings, and create a <text:list-style> element for you.
Each item in the string (or array) represents a list level
style for levels 1-10.
If an item contains 1, I, i, A, or a, then it is presumed
to be a numbering style; otherwise it is a bulleted style.
"""
_MAX_LIST_LEVEL = 10
SHOW_ALL_LEVELS = True
SHOW_ONE_LEVEL = False
def styleFromString(name, specifiers, delim, spacing, showAllLevels):
specArray = specifiers.split(delim)
return styleFromList( name, specArray, spacing, showAllLevels )
def styleFromList( styleName, specArray, spacing, showAllLevels):
bullet = ""
numPrefix = ""
numSuffix = ""
numberFormat = ""
cssLengthNum = 0
cssLengthUnits = ""
numbered = False
displayLevels = 0
listStyle = ListStyle(name=styleName)
numFormatPattern = re.compile("([1IiAa])")
cssLengthPattern = re.compile("([^a-z]+)\\s*([a-z]+)?")
m = cssLengthPattern.search( spacing )
if (m != None):
cssLengthNum = float(m.group(1))
if (m.lastindex == 2):
cssLengthUnits = m.group(2)
i = 0
while i < len(specArray):
specification = specArray[i]
m = numFormatPattern.search(specification)
if (m != None):
numberFormat = m.group(1)
numPrefix = specification[0:m.start(1)]
numSuffix = specification[m.end(1):]
bullet = ""
numbered = True
if (showAllLevels):
displayLevels = i + 1
else:
displayLevels = 1
else: # it's a bullet style
bullet = specification
numPrefix = ""
numSuffix = ""
numberFormat = ""
displayLevels = 1
numbered = False
if (numbered):
lls = ListLevelStyleNumber(level=(i+1))
if (numPrefix != ''):
lls.setAttribute('numprefix', numPrefix)
if (numSuffix != ''):
lls.setAttribute('numsuffix', numSuffix)
lls.setAttribute('displaylevels', displayLevels)
else:
lls = ListLevelStyleBullet(level=(i+1),bulletchar=bullet[0])
llp = ListLevelProperties()
llp.setAttribute('spacebefore', str(cssLengthNum * (i+1)) + cssLengthUnits)
llp.setAttribute('minlabelwidth', str(cssLengthNum) + cssLengthUnits)
lls.addElement( llp )
listStyle.addElement(lls)
i += 1
return listStyle
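# A hedged usage sketch (arguments are invented for illustration):
#   style = styleFromString(u"mixed-list", u"1.!*!(a)", u"!", u"0.8cm",
#                           SHOW_ALL_LEVELS)
# The specifier splits on "!" into ["1.", "*", "(a)"]: a numbered level "1.",
# a bulleted level "*", then a numbered level "(a)", each indented a further 0.8cm.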
# vim: set expandtab sw=4 :
| gpl-3.0 |
nicememory/pie | pyglet/pyglet/font/ttf.py | 3 | 24856 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
# $Id$
"""
Implementation of the Truetype file format.
Typical applications will not need to use this module directly; look at
`pyglyph.font` instead.
References:
* http://developer.apple.com/fonts/TTRefMan/RM06
* http://www.microsoft.com/typography/otspec
"""
from __future__ import division
from builtins import zip
from builtins import chr
from builtins import range
from builtins import object
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import codecs
import os
import mmap
import struct
class TruetypeInfo(object):
"""Information about a single Truetype face.
The class memory-maps the font file to read the tables, so
it is vital that you call the `close` method to avoid large memory
leaks. Once closed, you cannot call any of the ``get_*`` methods.
Not all tables have been implemented yet (or likely ever will).
Currently only the name and metric tables are read; in particular
there is no glyph or hinting information.
"""
_name_id_lookup = {
'copyright': 0,
'family': 1,
'subfamily': 2,
'identifier': 3,
'name': 4,
'version': 5,
'postscript': 6,
'trademark': 7,
'manufacturer': 8,
'designer': 9,
'description': 10,
'vendor-url': 11,
'designer-url': 12,
'license': 13,
'license-url': 14,
'preferred-family': 16,
'preferred-subfamily': 17,
'compatible-name': 18,
'sample': 19,
}
_platform_id_lookup = {
'unicode': 0,
'macintosh': 1,
'iso': 2,
'microsoft': 3,
'custom': 4
}
_microsoft_encoding_lookup = {
1: 'utf_16_be',
2: 'shift_jis',
4: 'big5',
6: 'johab',
10: 'utf_16_be'
}
_macintosh_encoding_lookup = {
0: 'mac_roman'
}
def __init__(self, filename):
"""Read the given TrueType file.
:Parameters:
`filename`
The name of any Windows, OS2 or Macintosh Truetype file.
The object must be closed (see `close`) after use.
An exception will be raised if the file does not exist or cannot
be read.
"""
if not filename: filename = ''
len = os.stat(filename).st_size
self._fileno = os.open(filename, os.O_RDONLY)
if hasattr(mmap, 'MAP_SHARED'):
self._data = mmap.mmap(self._fileno, len, mmap.MAP_SHARED,
mmap.PROT_READ)
else:
self._data = mmap.mmap(self._fileno, len, None, mmap.ACCESS_READ)
offsets = _read_offset_table(self._data, 0)
self._tables = {}
for table in _read_table_directory_entry.array(self._data,
offsets.size, offsets.num_tables):
self._tables[table.tag] = table
self._names = None
self._horizontal_metrics = None
self._character_advances = None
self._character_kernings = None
self._glyph_kernings = None
self._character_map = None
self._glyph_map = None
self._font_selection_flags = None
self.header = \
_read_head_table(self._data, self._tables['head'].offset)
self.horizontal_header = \
_read_horizontal_header(self._data, self._tables['hhea'].offset)
def get_font_selection_flags(self):
"""Return the font selection flags, as defined in OS/2 table"""
if not self._font_selection_flags:
OS2_table = \
_read_OS2_table(self._data, self._tables['OS/2'].offset)
self._font_selection_flags = OS2_table.fs_selection
return self._font_selection_flags
def is_bold(self):
"""Returns True iff the font describes itself as bold."""
return bool(self.get_font_selection_flags() & 0x20)
def is_italic(self):
"""Returns True iff the font describes itself as italic."""
return bool(self.get_font_selection_flags() & 0x1)
def get_names(self):
"""Returns a dictionary of names defined in the file.
The key of each item is a tuple of ``platform_id``, ``name_id``,
where each ID is the number as described in the Truetype format.
The value of each item is a tuple of
``encoding_id``, ``language_id``, ``value``, where ``value`` is
an encoded string.
"""
if self._names:
return self._names
naming_table = \
_read_naming_table(self._data, self._tables['name'].offset)
name_records = \
_read_name_record.array(self._data,
self._tables['name'].offset + naming_table.size,
naming_table.count)
storage = naming_table.string_offset + self._tables['name'].offset
self._names = {}
for record in name_records:
value = self._data[record.offset + storage:\
record.offset + storage + record.length]
key = record.platform_id, record.name_id
value = (record.encoding_id, record.language_id, value)
if not key in self._names:
self._names[key] = []
self._names[key].append(value)
return self._names
def get_name(self, name, platform=None, languages=None):
"""Returns the value of the given name in this font.
:Parameters:
`name`
Either an integer, representing the name_id desired (see
font format); or a string describing it, see below for
valid names.
`platform`
Platform for the requested name. Can be the integer ID,
or a string describing it. By default, the Microsoft
platform is searched first, then Macintosh.
`languages`
A list of language IDs to search. The first language
which defines the requested name will be used. By default,
all English dialects are searched.
If the name is not found, ``None`` is returned. If the name
is found, the value will be decoded and returned as a unicode
string. Currently only some common encodings are supported.
Valid names to request are (supply as a string)::
'copyright'
'family'
'subfamily'
'identifier'
'name'
'version'
'postscript'
'trademark'
'manufacturer'
'designer'
'description'
'vendor-url'
'designer-url'
'license'
'license-url'
'preferred-family'
'preferred-subfamily'
'compatible-name'
'sample'
Valid platforms to request are (supply as a string)::
'unicode'
'macintosh'
'iso'
'microsoft'
'custom'
"""
names = self.get_names()
if type(name) == str:
name = self._name_id_lookup[name]
if not platform:
for platform in ('microsoft','macintosh'):
value = self.get_name(name, platform, languages)
if value:
return value
if type(platform) == str:
platform = self._platform_id_lookup[platform]
if not (platform, name) in names:
return None
if platform == 3: # setup for microsoft
encodings = self._microsoft_encoding_lookup
if not languages:
# Default to english languages for microsoft
languages = (0x409,0x809,0xc09,0x1009,0x1409,0x1809)
elif platform == 1: # setup for macintosh
            encodings = self._macintosh_encoding_lookup
if not languages:
# Default to english for macintosh
languages = (0,)
for record in names[(platform, name)]:
if record[1] in languages and record[0] in encodings:
decoder = codecs.getdecoder(encodings[record[0]])
return decoder(record[2])[0]
return None
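    # Small usage sketch (the font path and returned values are hypothetical):
    #   info = TruetypeInfo('/path/to/font.ttf')
    #   family = info.get_name('family')           # e.g. u'Example Sans'
    #   advances = info.get_character_advances()   # e.g. {u'A': 0.72, ...} in em
    #   info.close()                                # release the mmap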
def get_horizontal_metrics(self):
"""Return all horizontal metric entries in table format."""
if not self._horizontal_metrics:
ar = _read_long_hor_metric.array(self._data,
self._tables['hmtx'].offset,
self.horizontal_header.number_of_h_metrics)
self._horizontal_metrics = ar
return self._horizontal_metrics
def get_character_advances(self):
"""Return a dictionary of character->advance.
They key of the dictionary is a unit-length unicode string,
and the value is a float giving the horizontal advance in
em.
"""
if self._character_advances:
return self._character_advances
ga = self.get_glyph_advances()
gmap = self.get_glyph_map()
self._character_advances = {}
for i in range(len(ga)):
if i in gmap and not gmap[i] in self._character_advances:
self._character_advances[gmap[i]] = ga[i]
return self._character_advances
def get_glyph_advances(self):
"""Return a dictionary of glyph->advance.
They key of the dictionary is the glyph index and the value is a float
giving the horizontal advance in em.
"""
hm = self.get_horizontal_metrics()
return [float(m.advance_width) / self.header.units_per_em for m in hm]
def get_character_kernings(self):
"""Return a dictionary of (left,right)->kerning
The key of the dictionary is a tuple of ``(left, right)``
where each element is a unit-length unicode string. The
value of the dictionary is the horizontal pairwise kerning
in em.
"""
if not self._character_kernings:
gmap = self.get_glyph_map()
kerns = self.get_glyph_kernings()
self._character_kernings = {}
for pair, value in kerns.items():
lglyph, rglyph = pair
lchar = lglyph in gmap and gmap[lglyph] or None
rchar = rglyph in gmap and gmap[rglyph] or None
if lchar and rchar:
self._character_kernings[(lchar, rchar)] = value
return self._character_kernings
def get_glyph_kernings(self):
"""Return a dictionary of (left,right)->kerning
The key of the dictionary is a tuple of ``(left, right)``
where each element is a glyph index. The value of the dictionary is
the horizontal pairwise kerning in em.
"""
if self._glyph_kernings:
return self._glyph_kernings
header = \
_read_kern_header_table(self._data, self._tables['kern'].offset)
offset = self._tables['kern'].offset + header.size
kernings = {}
for i in range(header.n_tables):
header = _read_kern_subtable_header(self._data, offset)
if header.coverage & header.horizontal_mask \
and not header.coverage & header.minimum_mask \
and not header.coverage & header.perpendicular_mask:
if header.coverage & header.format_mask == 0:
self._add_kernings_format0(kernings, offset + header.size)
offset += header.length
self._glyph_kernings = kernings
return kernings
def _add_kernings_format0(self, kernings, offset):
header = _read_kern_subtable_format0(self._data, offset)
kerning_pairs = _read_kern_subtable_format0Pair.array(self._data,
offset + header.size, header.n_pairs)
for pair in kerning_pairs:
if (pair.left, pair.right) in kernings:
kernings[(pair.left, pair.right)] += pair.value \
/ float(self.header.units_per_em)
else:
kernings[(pair.left, pair.right)] = pair.value \
/ float(self.header.units_per_em)
def get_glyph_map(self):
"""Calculate and return a reverse character map.
Returns a dictionary where the key is a glyph index and the
value is a unit-length unicode string.
"""
if self._glyph_map:
return self._glyph_map
cmap = self.get_character_map()
self._glyph_map = {}
for ch, glyph in cmap.items():
if not glyph in self._glyph_map:
self._glyph_map[glyph] = ch
return self._glyph_map
def get_character_map(self):
"""Return the character map.
Returns a dictionary where the key is a unit-length unicode
string and the value is a glyph index. Currently only
format 4 character maps are read.
"""
if self._character_map:
return self._character_map
cmap = _read_cmap_header(self._data, self._tables['cmap'].offset)
records = _read_cmap_encoding_record.array(self._data,
self._tables['cmap'].offset + cmap.size, cmap.num_tables)
self._character_map = {}
for record in records:
if record.platform_id == 3 and record.encoding_id == 1:
# Look at Windows Unicode charmaps only
offset = self._tables['cmap'].offset + record.offset
format_header = _read_cmap_format_header(self._data, offset)
if format_header.format == 4:
self._character_map = \
self._get_character_map_format4(offset)
break
return self._character_map
def _get_character_map_format4(self, offset):
# This is absolutely, without question, the *worst* file
# format ever. Whoever the fuckwit is that thought this up is
# a fuckwit.
header = _read_cmap_format4Header(self._data, offset)
seg_count = header.seg_count_x2 // 2
array_size = struct.calcsize('>%dH' % seg_count)
end_count = self._read_array('>%dH' % seg_count,
offset + header.size)
start_count = self._read_array('>%dH' % seg_count,
offset + header.size + array_size + 2)
id_delta = self._read_array('>%dh' % seg_count,
offset + header.size + array_size + 2 + array_size)
id_range_offset_address = \
offset + header.size + array_size + 2 + array_size + array_size
id_range_offset = self._read_array('>%dH' % seg_count,
id_range_offset_address)
character_map = {}
for i in range(0, seg_count):
if id_range_offset[i] != 0:
if id_range_offset[i] == 65535:
continue # Hack around a dodgy font (babelfish.ttf)
for c in range(start_count[i], end_count[i] + 1):
addr = id_range_offset[i] + 2*(c - start_count[i]) + \
id_range_offset_address + 2*i
g = struct.unpack('>H', self._data[addr:addr+2])[0]
if g != 0:
character_map[chr(c)] = (g + id_delta[i]) % 65536
else:
for c in range(start_count[i], end_count[i] + 1):
g = (c + id_delta[i]) % 65536
if g != 0:
character_map[chr(c)] = g
return character_map
def _read_array(self, format, offset):
size = struct.calcsize(format)
return struct.unpack(format, self._data[offset:offset+size])
def close(self):
"""Close the font file.
This is a good idea, since the entire file is memory mapped in
until this method is called. After closing cannot rely on the
``get_*`` methods.
"""
self._data.close()
os.close(self._fileno)
def _read_table(*entries):
""" Generic table constructor used for table formats listed at
end of file."""
fmt = '>'
names = []
for entry in entries:
name, type = entry.split(':')
names.append(name)
fmt += type
class _table_class(object):
size = struct.calcsize(fmt)
def __init__(self, data, offset):
items = struct.unpack(fmt, data[offset:offset+self.size])
self.pairs = list(zip(names, items))
for name, value in self.pairs:
setattr(self, name, value)
def __repr__(self):
s = '{' + ', '.join(['%s = %s' % (name, value) \
for name, value in self.pairs]) + '}'
return s
@staticmethod
def array(data, offset, count):
tables = []
for i in range(count):
tables.append(_table_class(data, offset))
offset += _table_class.size
return tables
return _table_class
# Table formats (see references)
_read_offset_table = _read_table('scalertype:I',
'num_tables:H',
'search_range:H',
'entry_selector:H',
'range_shift:H')
_read_table_directory_entry = _read_table('tag:4s',
'check_sum:I',
'offset:I',
'length:I')
_read_head_table = _read_table('version:i',
'font_revision:i',
'check_sum_adjustment:L',
'magic_number:L',
'flags:H',
'units_per_em:H',
'created:Q',
'modified:Q',
'x_min:h',
'y_min:h',
'x_max:h',
'y_max:h',
'mac_style:H',
'lowest_rec_p_pEM:H',
'font_direction_hint:h',
'index_to_loc_format:h',
'glyph_data_format:h')
_read_OS2_table = _read_table('version:H',
'x_avg_char_width:h',
'us_weight_class:H',
'us_width_class:H',
'fs_type:H',
'y_subscript_x_size:h',
'y_subscript_y_size:h',
'y_subscript_x_offset:h',
'y_subscript_y_offset:h',
'y_superscript_x_size:h',
'y_superscript_y_size:h',
'y_superscript_x_offset:h',
'y_superscript_y_offset:h',
'y_strikeout_size:h',
'y_strikeout_position:h',
's_family_class:h',
'panose1:B',
'panose2:B',
'panose3:B',
'panose4:B',
'panose5:B',
'panose6:B',
'panose7:B',
'panose8:B',
'panose9:B',
'panose10:B',
'ul_unicode_range1:L',
'ul_unicode_range2:L',
'ul_unicode_range3:L',
'ul_unicode_range4:L',
'ach_vend_id:I',
'fs_selection:H',
'us_first_char_index:H',
'us_last_char_index:H',
's_typo_ascender:h',
's_typo_descender:h',
's_typo_line_gap:h',
'us_win_ascent:H',
'us_win_descent:H',
'ul_code_page_range1:L',
'ul_code_page_range2:L',
'sx_height:h',
's_cap_height:h',
'us_default_char:H',
'us_break_char:H',
'us_max_context:H')
_read_kern_header_table = _read_table('version_num:H',
'n_tables:H')
_read_kern_subtable_header = _read_table('version:H',
'length:H',
'coverage:H')
_read_kern_subtable_header.horizontal_mask = 0x1
_read_kern_subtable_header.minimum_mask = 0x2
_read_kern_subtable_header.perpendicular_mask = 0x4
_read_kern_subtable_header.override_mask = 0x5
_read_kern_subtable_header.format_mask = 0xf0
_read_kern_subtable_format0 = _read_table('n_pairs:H',
'search_range:H',
'entry_selector:H',
'range_shift:H')
_read_kern_subtable_format0Pair = _read_table('left:H',
'right:H',
'value:h')
_read_cmap_header = _read_table('version:H',
'num_tables:H')
_read_cmap_encoding_record = _read_table('platform_id:H',
'encoding_id:H',
'offset:L')
_read_cmap_format_header = _read_table('format:H',
'length:H')
_read_cmap_format4Header = _read_table('format:H',
'length:H',
'language:H',
'seg_count_x2:H',
'search_range:H',
'entry_selector:H',
'range_shift:H')
_read_horizontal_header = _read_table('version:i',
'Advance:h',
'Descender:h',
'LineGap:h',
'advance_width_max:H',
'min_left_side_bearing:h',
'min_right_side_bearing:h',
'x_max_extent:h',
'caret_slope_rise:h',
'caret_slope_run:h',
'caret_offset:h',
'reserved1:h',
'reserved2:h',
'reserved3:h',
'reserved4:h',
'metric_data_format:h',
'number_of_h_metrics:H')
_read_long_hor_metric = _read_table('advance_width:H',
'lsb:h')
_read_naming_table = _read_table('format:H',
'count:H',
'string_offset:H')
_read_name_record = _read_table('platform_id:H',
'encoding_id:H',
'language_id:H',
'name_id:H',
'length:H',
'offset:H')
| apache-2.0 |
catnet23/PocketMap | pogom/fakePogoApi.py | 32 | 1370 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import struct
from time import time
from .utils import get_args
class FakePogoApi:
def __init__(self, mock):
# Fake a 24 hour auth token.
self._auth_provider = type('', (object,), {
"_ticket_expire": (time() + (3600 * 24)) * 1000})()
self.inited = False
self.mock = mock
def set_proxy(self, proxy_config):
pass
def activate_signature(self, library):
pass
def set_position(self, lat, lng, alt):
# Meters radius (very, very rough approximation -- deal with it.)
if not self.inited:
args = get_args()
radius = 140 * args.step_limit
requests.get('{}/login/{}/{}/{}'.format(
self.mock, lat, lng, radius))
self.inited = True
def set_authentication(self, provider=None, oauth2_refresh_token=None,
username=None, password=None):
pass
def i2f(self, i):
return struct.unpack('<d', struct.pack('<Q', i))[0]
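    # i2f() reinterprets an integer's bit pattern as an IEEE-754 double, since the
    # mock passes lat/lng around as packed integers. Illustrative (assumed) value:
    #   self.i2f(0x4044800000000000) == 41.0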
def get_map_objects(self, latitude=None, longitude=None,
since_timestamp_ms=None, cell_id=None):
location = (self.i2f(latitude), self.i2f(longitude))
response = requests.get('{}/scan/{}/{}'.format(self.mock, *location))
return response.json()
| agpl-3.0 |
a115027a/Openkore | src/scons-local-2.0.1/SCons/Tool/yacc.py | 61 | 4750 | """SCons.Tool.yacc
Tool-specific initialization for yacc.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/yacc.py 5134 2010/08/16 23:02:40 bdeegan"
import os.path
import SCons.Defaults
import SCons.Tool
import SCons.Util
YaccAction = SCons.Action.Action("$YACCCOM", "$YACCCOMSTR")
def _yaccEmitter(target, source, env, ysuf, hsuf):
yaccflags = env.subst("$YACCFLAGS", target=target, source=source)
flags = SCons.Util.CLVar(yaccflags)
targetBase, targetExt = os.path.splitext(SCons.Util.to_String(target[0]))
if '.ym' in ysuf: # If using Objective-C
target = [targetBase + ".m"] # the extension is ".m".
# If -d is specified on the command line, yacc will emit a .h
# or .hpp file with the same name as the .c or .cpp output file.
if '-d' in flags:
target.append(targetBase + env.subst(hsuf, target=target, source=source))
# If -g is specified on the command line, yacc will emit a .vcg
# file with the same base name as the .y, .yacc, .ym or .yy file.
if "-g" in flags:
base, ext = os.path.splitext(SCons.Util.to_String(source[0]))
target.append(base + env.subst("$YACCVCGFILESUFFIX"))
# With --defines and --graph, the name of the file is totally defined
# in the options.
fileGenOptions = ["--defines=", "--graph="]
for option in flags:
for fileGenOption in fileGenOptions:
l = len(fileGenOption)
if option[:l] == fileGenOption:
# A file generating option is present, so add the file
# name to the list of targets.
fileName = option[l:].strip()
target.append(fileName)
return (target, source)
def yEmitter(target, source, env):
return _yaccEmitter(target, source, env, ['.y', '.yacc'], '$YACCHFILESUFFIX')
def ymEmitter(target, source, env):
return _yaccEmitter(target, source, env, ['.ym'], '$YACCHFILESUFFIX')
def yyEmitter(target, source, env):
return _yaccEmitter(target, source, env, ['.yy'], '$YACCHXXFILESUFFIX')
def generate(env):
"""Add Builders and construction variables for yacc to an Environment."""
c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
# C
c_file.add_action('.y', YaccAction)
c_file.add_emitter('.y', yEmitter)
c_file.add_action('.yacc', YaccAction)
c_file.add_emitter('.yacc', yEmitter)
# Objective-C
c_file.add_action('.ym', YaccAction)
c_file.add_emitter('.ym', ymEmitter)
# C++
cxx_file.add_action('.yy', YaccAction)
cxx_file.add_emitter('.yy', yyEmitter)
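    # Prefer GNU bison when it is available; otherwise fall back to plain yacc.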
env['YACC'] = env.Detect('bison') or 'yacc'
env['YACCFLAGS'] = SCons.Util.CLVar('')
env['YACCCOM'] = '$YACC $YACCFLAGS -o $TARGET $SOURCES'
env['YACCHFILESUFFIX'] = '.h'
# Apparently, OS X now creates file.hpp like everybody else
# I have no idea when it changed; it was fixed in 10.4
#if env['PLATFORM'] == 'darwin':
# # Bison on Mac OS X just appends ".h" to the generated target .cc
# # or .cpp file name. Hooray for delayed expansion of variables.
# env['YACCHXXFILESUFFIX'] = '${TARGET.suffix}.h'
#else:
# env['YACCHXXFILESUFFIX'] = '.hpp'
env['YACCHXXFILESUFFIX'] = '.hpp'
env['YACCVCGFILESUFFIX'] = '.vcg'
def exists(env):
return env.Detect(['bison', 'yacc'])
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| gpl-2.0 |
EkaterinaFedorova/murano-repository | muranorepository/openstack/common/network_utils.py | 57 | 2690 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Network-related utilities and helper functions.
"""
import urlparse
def parse_host_port(address, default_port=None):
"""Interpret a string as a host:port pair.
An IPv6 address MUST be escaped if accompanied by a port,
because otherwise ambiguity ensues: 2001:db8:85a3::8a2e:370:7334
means both [2001:db8:85a3::8a2e:370:7334] and
[2001:db8:85a3::8a2e:370]:7334.
>>> parse_host_port('server01:80')
('server01', 80)
>>> parse_host_port('server01')
('server01', None)
>>> parse_host_port('server01', default_port=1234)
('server01', 1234)
>>> parse_host_port('[::1]:80')
('::1', 80)
>>> parse_host_port('[::1]')
('::1', None)
>>> parse_host_port('[::1]', default_port=1234)
('::1', 1234)
>>> parse_host_port('2001:db8:85a3::8a2e:370:7334', default_port=1234)
('2001:db8:85a3::8a2e:370:7334', 1234)
"""
if address[0] == '[':
# Escaped ipv6
_host, _port = address[1:].split(']')
host = _host
if ':' in _port:
port = _port.split(':')[1]
else:
port = default_port
else:
if address.count(':') == 1:
host, port = address.split(':')
else:
# 0 means ipv4, >1 means ipv6.
# We prohibit unescaped ipv6 addresses with port.
host = address
port = default_port
return (host, None if port is None else int(port))
def urlsplit(url, scheme='', allow_fragments=True):
"""Parse a URL using urlparse.urlsplit(), splitting query and fragments.
This function papers over Python issue9374 when needed.
The parameters are the same as urlparse.urlsplit.
"""
scheme, netloc, path, query, fragment = urlparse.urlsplit(
url, scheme, allow_fragments)
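    # On affected Python versions (issue9374), urlsplit leaves the fragment and
    # query attached to the path for unknown schemes, so they are split off here.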
if allow_fragments and '#' in path:
path, fragment = path.split('#', 1)
if '?' in path:
path, query = path.split('?', 1)
return urlparse.SplitResult(scheme, netloc, path, query, fragment)
| apache-2.0 |
CelticWebSolutions/Android-Kernel-2.6.35-Nook-Tablet | tools/perf/scripts/python/failed-syscalls-by-pid.py | 944 | 1869 | # failed system call counts, by pid
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
usage = "perf trace -s failed-syscalls-by-pid.py [comm]\n"
for_comm = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
pass
def trace_end():
print_error_totals()
def raw_syscalls__sys_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, ret):
if for_comm is not None:
if common_comm != for_comm:
return
if ret < 0:
try:
syscalls[common_comm][common_pid][id][ret] += 1
except TypeError:
syscalls[common_comm][common_pid][id][ret] = 1
def print_error_totals():
if for_comm is not None:
print "\nsyscall errors for %s:\n\n" % (for_comm),
else:
print "\nsyscall errors:\n\n",
print "%-30s %10s\n" % ("comm [pid]", "count"),
print "%-30s %10s\n" % ("------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id in id_keys:
print " syscall: %-16d\n" % (id),
ret_keys = syscalls[comm][pid][id].keys()
for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
print " err = %-20d %10d\n" % (ret, val),
| gpl-2.0 |
gtko/CouchPotatoServer | libs/apscheduler/events.py | 144 | 2529 | __all__ = ('EVENT_SCHEDULER_START', 'EVENT_SCHEDULER_SHUTDOWN',
'EVENT_JOBSTORE_ADDED', 'EVENT_JOBSTORE_REMOVED',
'EVENT_JOBSTORE_JOB_ADDED', 'EVENT_JOBSTORE_JOB_REMOVED',
'EVENT_JOB_EXECUTED', 'EVENT_JOB_ERROR', 'EVENT_JOB_MISSED',
'EVENT_ALL', 'SchedulerEvent', 'JobStoreEvent', 'JobEvent')
EVENT_SCHEDULER_START = 1 # The scheduler was started
EVENT_SCHEDULER_SHUTDOWN = 2 # The scheduler was shut down
EVENT_JOBSTORE_ADDED = 4 # A job store was added to the scheduler
EVENT_JOBSTORE_REMOVED = 8 # A job store was removed from the scheduler
EVENT_JOBSTORE_JOB_ADDED = 16 # A job was added to a job store
EVENT_JOBSTORE_JOB_REMOVED = 32 # A job was removed from a job store
EVENT_JOB_EXECUTED = 64 # A job was executed successfully
EVENT_JOB_ERROR = 128 # A job raised an exception during execution
EVENT_JOB_MISSED = 256 # A job's execution was missed
EVENT_ALL = (EVENT_SCHEDULER_START | EVENT_SCHEDULER_SHUTDOWN |
EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED |
EVENT_JOBSTORE_JOB_ADDED | EVENT_JOBSTORE_JOB_REMOVED |
EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_MISSED)
class SchedulerEvent(object):
"""
An event that concerns the scheduler itself.
:var code: the type code of this event
"""
def __init__(self, code):
self.code = code
class JobStoreEvent(SchedulerEvent):
"""
An event that concerns job stores.
:var alias: the alias of the job store involved
:var job: the new job if a job was added
"""
def __init__(self, code, alias, job=None):
SchedulerEvent.__init__(self, code)
self.alias = alias
if job:
self.job = job
class JobEvent(SchedulerEvent):
"""
An event that concerns the execution of individual jobs.
:var job: the job instance in question
:var scheduled_run_time: the time when the job was scheduled to be run
:var retval: the return value of the successfully executed job
:var exception: the exception raised by the job
:var traceback: the traceback object associated with the exception
"""
def __init__(self, code, job, scheduled_run_time, retval=None,
exception=None, traceback=None):
SchedulerEvent.__init__(self, code)
self.job = job
self.scheduled_run_time = scheduled_run_time
self.retval = retval
self.exception = exception
self.traceback = traceback
| gpl-3.0 |
motion2015/a3 | lms/djangoapps/course_structure_api/v0/serializers.py | 22 | 2671 | """ Django REST Framework Serializers """
from django.core.urlresolvers import reverse
from rest_framework import serializers
from courseware.courses import course_image_url
class CourseSerializer(serializers.Serializer):
""" Serializer for Courses """
id = serializers.CharField() # pylint: disable=invalid-name
name = serializers.CharField(source='display_name')
category = serializers.CharField()
org = serializers.SerializerMethodField('get_org')
run = serializers.SerializerMethodField('get_run')
course = serializers.SerializerMethodField('get_course')
uri = serializers.SerializerMethodField('get_uri')
image_url = serializers.SerializerMethodField('get_image_url')
start = serializers.DateTimeField()
end = serializers.DateTimeField()
def get_org(self, course):
""" Gets the course org """
return course.id.org
def get_run(self, course):
""" Gets the course run """
return course.id.run
def get_course(self, course):
""" Gets the course """
return course.id.course
def get_uri(self, course):
""" Builds course detail uri """
# pylint: disable=no-member
request = self.context['request']
return request.build_absolute_uri(reverse('course_structure_api:v0:detail', kwargs={'course_id': course.id}))
def get_image_url(self, course):
""" Get the course image URL """
return course_image_url(course)
class GradingPolicySerializer(serializers.Serializer):
""" Serializer for course grading policy. """
assignment_type = serializers.CharField(source='type')
count = serializers.IntegerField(source='min_count')
dropped = serializers.IntegerField(source='drop_count')
weight = serializers.FloatField()
# pylint: disable=invalid-name
class BlockSerializer(serializers.Serializer):
""" Serializer for course structure block. """
id = serializers.CharField(source='usage_key')
type = serializers.CharField(source='block_type')
display_name = serializers.CharField()
graded = serializers.BooleanField(default=False)
format = serializers.CharField()
children = serializers.CharField()
class CourseStructureSerializer(serializers.Serializer):
""" Serializer for course structure. """
root = serializers.CharField(source='root')
blocks = serializers.SerializerMethodField('get_blocks')
def get_blocks(self, structure):
""" Serialize the individual blocks. """
serialized = {}
for key, block in structure['blocks'].iteritems():
serialized[key] = BlockSerializer(block).data
return serialized
| agpl-3.0 |
geekboxzone/lollipop_prebuilts_gcc_darwin-x86_mips_mips64el-linux-android-4.9 | share/gdb/python/gdb/command/prompt.py | 120 | 2135 | # Extended prompt.
# Copyright (C) 2011-2014 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""GDB command for working with extended prompts."""
import gdb
import gdb.prompt
class _ExtendedPrompt(gdb.Parameter):
"""Set the extended prompt.
Usage: set extended-prompt VALUE
Substitutions are applied to VALUE to compute the real prompt.
The currently defined substitutions are:
"""
# Add the prompt library's dynamically generated help to the
# __doc__ string.
__doc__ = __doc__ + gdb.prompt.prompt_help()
set_doc = "Set the extended prompt."
show_doc = "Show the extended prompt."
def __init__(self):
super(_ExtendedPrompt, self).__init__("extended-prompt",
gdb.COMMAND_SUPPORT,
gdb.PARAM_STRING_NOESCAPE)
self.value = ''
self.hook_set = False
def get_show_string (self, pvalue):
        if self.value != '':
return "The extended prompt is: " + self.value
else:
return "The extended prompt is not set."
def get_set_string (self):
if self.hook_set == False:
gdb.prompt_hook = self.before_prompt_hook
self.hook_set = True
return ""
def before_prompt_hook(self, current):
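        # Invoked by gdb before each prompt is displayed: return the substituted
        # prompt (with backslashes escaped) or None to leave the prompt unchanged.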
        if self.value != '':
newprompt = gdb.prompt.substitute_prompt(self.value)
return newprompt.replace('\\', '\\\\')
else:
return None
_ExtendedPrompt()
| gpl-2.0 |
aspidites/beets | beetsplug/web/__init__.py | 1 | 3486 | # This file is part of beets.
# Copyright 2011, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""A Web interface to beets."""
from beets.plugins import BeetsPlugin
from beets import ui
import beets.library
import flask
from flask import g
DEFAULT_HOST = ''
DEFAULT_PORT = 8337
# Utilities.
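# _rep converts a beets Item or Album into a JSON-serializable dict, stripping
# local filesystem paths before the data is returned to the web client.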
def _rep(obj):
if isinstance(obj, beets.library.Item):
out = dict(obj.record)
del out['path']
return out
elif isinstance(obj, beets.library.Album):
out = dict(obj._record)
del out['artpath']
out['items'] = [_rep(item) for item in obj.items()]
return out
# Flask setup.
app = flask.Flask(__name__)
@app.before_request
def before_request():
g.lib = app.config['lib']
# Items.
@app.route('/item/<int:item_id>')
def single_item(item_id):
item = g.lib.get_item(item_id)
return flask.jsonify(_rep(item))
@app.route('/item/')
def all_items():
with g.lib.transaction() as tx:
rows = tx.query("SELECT id FROM items")
all_ids = [row[0] for row in rows]
return flask.jsonify(item_ids=all_ids)
@app.route('/item/<int:item_id>/file')
def item_file(item_id):
item = g.lib.get_item(item_id)
return flask.send_file(item.path)
@app.route('/item/query/<path:query>')
def item_query(query):
parts = query.split('/')
items = g.lib.items(parts)
return flask.jsonify(results=[_rep(item) for item in items])
# Albums.
@app.route('/album/<int:album_id>')
def single_album(album_id):
album = g.lib.get_album(album_id)
return flask.jsonify(_rep(album))
@app.route('/album/')
def all_albums():
with g.lib.transaction() as tx:
rows = tx.query("SELECT id FROM albums")
all_ids = [row[0] for row in rows]
return flask.jsonify(album_ids=all_ids)
@app.route('/album/query/<path:query>')
def album_query(query):
parts = query.split('/')
albums = g.lib.albums(parts)
return flask.jsonify(results=[_rep(album) for album in albums])
@app.route('/album/<int:album_id>/art')
def album_art(album_id):
album = g.lib.get_album(album_id)
return flask.send_file(album.artpath)
# UI.
@app.route('/')
def home():
return flask.render_template('index.html')
# Plugin hook.
class WebPlugin(BeetsPlugin):
def commands(self):
cmd = ui.Subcommand('web', help='start a Web interface')
cmd.parser.add_option('-d', '--debug', action='store_true',
default=False, help='debug mode')
def func(lib, config, opts, args):
host = args.pop(0) if args else \
beets.ui.config_val(config, 'web', 'host', DEFAULT_HOST)
port = args.pop(0) if args else \
beets.ui.config_val(config, 'web', 'port', str(DEFAULT_PORT))
port = int(port)
app.config['lib'] = lib
app.run(host=host, port=port, debug=opts.debug, threaded=True)
cmd.func = func
return [cmd]
| mit |
waheedahmed/edx-platform | openedx/core/lib/block_structure/factory.py | 22 | 3260 | """
Module for factory class for BlockStructure objects.
"""
from .block_structure import BlockStructureModulestoreData
class BlockStructureFactory(object):
"""
Factory class for BlockStructure objects.
"""
@classmethod
def create_from_modulestore(cls, root_block_usage_key, modulestore):
"""
Creates and returns a block structure from the modulestore
starting at the given root_block_usage_key.
Arguments:
root_block_usage_key (UsageKey) - The usage_key for the root
of the block structure that is to be created.
modulestore (ModuleStoreRead) - The modulestore that
contains the data for the xBlocks within the block
structure starting at root_block_usage_key.
Returns:
BlockStructureModulestoreData - The created block structure
with instantiated xBlocks from the given modulestore
starting at root_block_usage_key.
Raises:
xmodule.modulestore.exceptions.ItemNotFoundError if a block for
root_block_usage_key is not found in the modulestore.
"""
block_structure = BlockStructureModulestoreData(root_block_usage_key)
blocks_visited = set()
def build_block_structure(xblock):
"""
Recursively update the block structure with the given xBlock
and its descendants.
"""
# Check if the xblock was already visited (can happen in
# DAGs).
if xblock.location in blocks_visited:
return
# Add the xBlock.
blocks_visited.add(xblock.location)
block_structure._add_xblock(xblock.location, xblock) # pylint: disable=protected-access
# Add relations with its children and recurse.
for child in xblock.get_children():
block_structure._add_relation(xblock.location, child.location) # pylint: disable=protected-access
build_block_structure(child)
root_xblock = modulestore.get_item(root_block_usage_key, depth=None)
build_block_structure(root_xblock)
return block_structure
@classmethod
def create_from_cache(cls, root_block_usage_key, block_structure_cache):
"""
Deserializes and returns the block structure starting at
root_block_usage_key from the given cache, if it's found in the cache.
The given root_block_usage_key must equate the root_block_usage_key
previously passed to serialize_to_cache.
Arguments:
root_block_usage_key (UsageKey) - The usage_key for the root
of the block structure that is to be deserialized from
the given cache.
block_structure_cache (BlockStructureCache) - The
cache from which the block structure is to be
deserialized.
Returns:
BlockStructure - The deserialized block structure starting
at root_block_usage_key, if found in the cache.
NoneType - If the root_block_usage_key is not found in the cache.
"""
return block_structure_cache.get(root_block_usage_key)
| agpl-3.0 |
lordappsec/ooni-probe | ooni/utils/onion.py | 5 | 3654 | import string
import subprocess
from distutils.spawn import find_executable
from distutils.version import LooseVersion
from txtorcon.util import find_tor_binary as tx_find_tor_binary
from ooni.settings import config
class TorVersion(LooseVersion):
pass
class OBFSProxyVersion(LooseVersion):
pass
def find_tor_binary():
if config.advanced.tor_binary:
return config.advanced.tor_binary
return tx_find_tor_binary()
def executable_version(binary, strip=lambda x: x):
if not binary:
return None
try:
proc = subprocess.Popen((binary, '--version'),
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except OSError:
pass
else:
stdout, _ = proc.communicate()
if proc.poll() == 0 and stdout != '':
version = stdout.strip()
return LooseVersion(strip(version))
return None
def tor_version():
version = executable_version(find_tor_binary(),
lambda x: x.split(' ')[2])
return TorVersion(str(version))
def obfsproxy_version():
version = executable_version(find_executable('obfsproxy'))
return OBFSProxyVersion(str(version))
def transport_name(address):
"""
    If the first token of the bridge address is a valid C identifier,
    we consider it to be a pluggable-transport name.
    Returns:
        The transport_name if the bridge line uses a transport.
        None if it is not an obfsproxy/pluggable-transport bridge.
"""
transport_name = address.split(' ')[0]
transport_name_chars = string.ascii_letters + string.digits
if all(c in transport_name_chars for c in transport_name):
return transport_name
else:
return None
tor_details = {
'binary': find_tor_binary(),
'version': tor_version()
}
obfsproxy_details = {
'binary': find_executable('obfsproxy'),
'version': obfsproxy_version()
}
transport_bin_name = { 'fte': 'fteproxy',
'scramblesuit': 'obfsproxy',
'obfs2': 'obfsproxy',
'obfs3': 'obfsproxy',
'obfs4': 'obfs4proxy' }
_pyobfsproxy_line = lambda transport, bin_loc, log_file: \
"%s exec %s --log-min-severity info --log-file %s managed" % \
(transport, bin_loc, log_file)
_transport_line_templates = {
'fte': lambda bin_loc, log_file : \
"fte exec %s --managed" % bin_loc,
'scramblesuit': lambda bin_loc, log_file: \
_pyobfsproxy_line('scramblesuit', bin_loc, log_file),
'obfs2': lambda bin_loc, log_file: \
_pyobfsproxy_line('obfs2', bin_loc, log_file),
'obfs3': lambda bin_loc, log_file: \
_pyobfsproxy_line('obfs3', bin_loc, log_file),
'obfs4': lambda bin_loc, log_file: \
"obfs4 exec %s --enableLogging=true --logLevel=INFO" % bin_loc }
class UnrecognizedTransport(Exception):
pass
class UninstalledTransport(Exception):
pass
class OutdatedObfsproxy(Exception):
pass
class OutdatedTor(Exception):
pass
def bridge_line(transport, log_file):
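    # Build the launch line for the requested pluggable transport, after checking
    # that the helper binary exists and that the installed obfsproxy/tor versions
    # are recent enough to support it.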
bin_name = transport_bin_name.get(transport)
if not bin_name:
raise UnrecognizedTransport
bin_loc = find_executable(bin_name)
if not bin_loc:
raise UninstalledTransport
if OBFSProxyVersion('0.2') > obfsproxy_details['version']:
raise OutdatedObfsproxy
if (transport == 'scramblesuit' or \
bin_name == 'obfs4proxy') and \
TorVersion('0.2.5.1') > tor_details['version']:
raise OutdatedTor
if TorVersion('0.2.4.1') > tor_details['version']:
raise OutdatedTor
return _transport_line_templates[transport](bin_loc, log_file)
| bsd-2-clause |
sadaszewski/scimd | tables.py | 1 | 4234 | #
# Copyright (C) 2015, Stanislaw Adaszewski
# [email protected]
# http://algoholic.eu
#
# License: 2-clause BSD
#
from markdown import Extension
from markdown.blockprocessors import BlockProcessor
from markdown.util import etree
import numpy as np
from collections import defaultdict
import numpy.core.defchararray as dca
class TableExtension(Extension):
def extendMarkdown(self, md, md_globals):
md.parser.blockprocessors.add('table',
TableProcessor(md.parser),
'<hashheader')
def makeExtension(configs={}):
return TableExtension(configs=configs)
class TableProcessor(BlockProcessor):
def test(self, parent, block):
lines = block.split('\n')
for l in lines:
if set(l.strip()) == set(('-', '|')):
return True
return False
def run(self, parent, blocks):
block = blocks.pop(0)
lines = map(lambda x: list(x.strip()), block.split('\n'))
# print 'lines:', lines
ary = np.array(lines, dtype='|U1')
cstart = np.zeros(ary.shape, dtype=np.int)
cend = np.zeros(ary.shape, dtype=np.int)
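        # A cell's top-left corner is a '|' with a '-' diagonally up-right of it
        # (or on the first row); its bottom-right corner is a '|' with a '-'
        # diagonally down-left of it (or on the last row).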
for r in xrange(ary.shape[0]):
for c in xrange(ary.shape[1]):
if ary[r, c] == '|':
if c + 1 < ary.shape[1] and (r == 0 or ary[r - 1, c + 1] == '-'):
cstart[r, c] = True
if c > 0 and (r + 1 == ary.shape[0] or ary[r + 1, c - 1] == '-'):
cend[r, c] = True
cstart = zip(*np.nonzero(cstart))
cend = zip(*np.nonzero(cend))
# print 'cstart:', cstart
# print 'cend:', cend
rpos = np.nonzero(np.max(ary == '-', axis=1))
cpos = np.nonzero(np.max(ary == '|', axis=0))
# print rpos
# print cpos
assert(len(cstart) == len(cend))
cells = []
for k in xrange(len(cstart)):
r, c = cstart[k][0], cstart[k][1] + 1
while r < ary.shape[0] and c < ary.shape[1]:
# print r, c
if ary[r, c] == '|':
if (r, c) in cend:
rowspan = len(np.nonzero((rpos >= cstart[k][0]) * (rpos <= r))[0]) + 1
colspan = len(np.nonzero((cpos >= cstart[k][1]) * (cpos <= c))[0]) - 1
# print 'Cell', k, cstart[k], (r, c), 'rowspan:', rowspan, 'colspan:', colspan
# print ' %s' % ary[cstart[k][0]:r+1, cstart[k][1]:c-1].tostring()
cells.append((cstart[k], (r, c), rowspan, colspan))
break
else:
r += 1
c = cstart[k][1]
c += 1
# print cells
table = etree.SubElement(parent, 'table')
# table.set('style', 'border: solid 1px black;')
table.set('border', '1')
rows = defaultdict(lambda: [])
for k in xrange(len(cells)):
cell = cells[k]
r = len(np.nonzero(rpos < cells[k][0][0])[0])
c = len(np.nonzero(cpos < cells[k][0][1])[0])
# print 'Cell', k, 'r:', r, 'c:', c, 'rowspan:', cells[k][2], 'colspan:', cells[k][3]
text = ary[cells[k][0][0]:cells[k][1][0]+1, cells[k][0][1]+1:cells[k][1][1]]
text = map(lambda x: u''.join(x).strip(), text)
# text = list(np.ravel(text))
# text = np
text = u'\n'.join(text) # map(lambda x: x.tostring().strip(), text))
# print ' %s' % text
rows[r].append((text, cells[k][2], cells[k][3]))
for r in xrange(len(rows)):
# print 'Row', r
tr = etree.SubElement(table, 'tr')
for c in xrange(len(rows[r])):
td = etree.SubElement(tr, 'td')
try:
td.text = rows[r][c][0] # .encode('utf-8')
except:
print str(type(block))
raise ValueError(str(rows[r][c][0]) + ' ' + str(type(rows[r][c][0])))
td.set('rowspan', str(rows[r][c][1]))
td.set('colspan', str(rows[r][c][2]))
# return table
| bsd-2-clause |
levilucio/SyVOLT | UMLRT2Kiltera_MM/Properties/from_thesis/HSS3_then2_IsolatedLHS.py | 1 | 2493 | from core.himesis import Himesis, HimesisPreConditionPatternLHS
import uuid
class HSS3_then2_IsolatedLHS(HimesisPreConditionPatternLHS):
def __init__(self):
"""
Creates the himesis graph representing the AToM3 model HSS3_then2_IsolatedLHS.
"""
# Flag this instance as compiled now
self.is_compiled = True
super(HSS3_then2_IsolatedLHS, self).__init__(name='HSS3_then2_IsolatedLHS', num_nodes=0, edges=[])
# Add the edges
self.add_edges([])
# Set the graph attributes
self["mm__"] = ['MT_pre__FamiliesToPersonsMM', 'MoTifRule']
self["MT_constraint__"] = """#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================
return True
"""
self["name"] = """"""
self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'SS3_then2')
# Set the node attributes
# Add the attribute equations
self["equations"] = []
def constraint(self, PreNode, graph):
"""
Executable constraint code.
@param PreNode: Function taking an integer as parameter
and returns the node corresponding to that label.
"""
#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================
return True
| mit |
daniestevez/gr-satellites | python/pwsat2_submitter.py | 1 | 4853 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2018 Daniel Estevez <[email protected]>
#
# This file is part of gr-satellites
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This contains code taken from
# https://github.com/PW-Sat2/SimpleUploader-radio.pw-sat.pl
# That code is licenced under the following terms:
# MIT License
#
# Copyright (c) 2017 SoftwareMill
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import base64
import datetime
import json
from gnuradio import gr
import numpy
import pmt
from . import hdlc
class pwsat2_submitter(gr.basic_block):
"""docstring for block pwsat2_submitter"""
def __init__(self, credentials_file, initialTimestamp):
gr.basic_block.__init__(
self,
name="pwsat2_submitter",
in_sig=[],
out_sig=[])
self.requests = __import__('requests')
self.baseUrl = 'http://radio.pw-sat.pl'
self.headers = {'content-type': 'application/json'}
dtformat = '%Y-%m-%d %H:%M:%S'
self.initialTimestamp = (
datetime.datetime.strptime(initialTimestamp, dtformat)
if initialTimestamp != '' else None)
self.startTimestamp = datetime.datetime.utcnow()
self.authenticate(credentials_file)
self.message_port_register_in(pmt.intern('in'))
self.set_msg_handler(pmt.intern('in'), self.handle_msg)
def authenticate(self, credentials_path):
try:
credentials = self.loadCredentials(credentials_path)
except (ValueError, IOError) as e:
print('Could not load credentials for', self.baseUrl)
print(e)
self.cookies = None
return
url = self.baseUrl+'/api/authenticate'
response = self.requests.post(url,
data=json.dumps(credentials),
headers=self.headers)
if response.status_code == 200:
self.cookies = response.cookies
else:
print('Could not authenticate to PW-Sat2 server')
print('Reply:', response.text)
print('HTTP code', response.status_code)
self.cookies = None
def loadCredentials(self, path):
with open(path) as f:
credentials = json.load(f)
return credentials
def putPacket(self, frame, timestamp):
if self.cookies is None:
print('Not uploading packet to',
self.baseUrl,
'as we are not authenticated')
return
url = self.baseUrl+'/communication/frame'
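        # Convert the timestamp to milliseconds since the Unix epoch; the frame
        # itself is uploaded base64-encoded.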
timestamp = (timestamp - datetime.datetime(1970, 1, 1)).total_seconds()
timestamp = int(timestamp * 1000)
payload = {'frame': str(base64.b64encode(frame), encoding='ascii'),
'timestamp': timestamp,
'traffic': 'Rx'}
response = self.requests.put(url,
data=json.dumps(payload),
headers=self.headers,
cookies=self.cookies)
return response.text
def handle_msg(self, msg_pmt):
msg = pmt.cdr(msg_pmt)
if not pmt.is_u8vector(msg):
print('[ERROR] Received invalid message type. Expected u8vector')
return
data = bytearray(pmt.u8vector_elements(msg))
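        # Append the CRC-16/CCITT of the frame, least-significant byte first.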
crc = hdlc.crc_ccitt(data)
data.append(crc & 0xff)
data.append((crc >> 8) & 0xff)
frame = bytes(data)
now = datetime.datetime.utcnow()
timestamp = (now - self.startTimestamp + self.initialTimestamp
if self.initialTimestamp else now)
response = self.putPacket(frame, timestamp)
if response:
print('Packet uploaded to', self.baseUrl, response)
| gpl-3.0 |
was4444/chromium.src | tools/polymer/txt_to_polymer_grdp.py | 21 | 2455 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import with_statement
import os
import string
import sys
FILE_TEMPLATE = \
"""<?xml version="1.0" encoding="utf-8"?>
<!--
This file is generated.
Please use 'src/tools/polymer/polymer_grdp_to_txt.py' and
'src/tools/polymer/txt_to_polymer_grdp.py' to modify it, if possible.
'polymer_grdp_to_txt.py' converts 'polymer_resources.grdp' to a plain list of
used Polymer components:
...
iron-iron-iconset/iron-iconset-extracted.js
iron-iron-iconset/iron-iconset.html
...
'txt_to_polymer_grdp.py' converts list back to GRDP file.
Usage:
$ polymer_grdp_to_txt.py polymer_resources.grdp > /tmp/list.txt
$ vim /tmp/list.txt
$ txt_to_polymer_grdp.py /tmp/list.txt > polymer_resources.grdp
-->
<grit-part>
<!-- Polymer 1.0 -->
%(v_1_0)s
<structure name="IDR_POLYMER_1_0_WEB_ANIMATIONS_JS_WEB_ANIMATIONS_NEXT_LITE_MIN_JS"
file="../../../third_party/web-animations-js/sources/web-animations-next-lite.min.js"
type="chrome_html" />
</grit-part>
"""
DEFINITION_TEMPLATE_1_0 = \
""" <structure name="%s"
file="../../../third_party/polymer/v1_0/components-chromium/%s"
type="chrome_html" />"""
def PathToGritId(path):
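  # e.g. 'iron-icon/iron-icon.html' -> 'IDR_POLYMER_1_0_IRON_ICON_IRON_ICON_HTML'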
table = string.maketrans(string.lowercase + '/.-', string.uppercase + '___')
return 'IDR_POLYMER_1_0_' + path.translate(table)
def SortKey(record):
return (record, PathToGritId(record))
def ParseRecord(record):
return record.strip()
class FileNotFoundException(Exception):
pass
_HERE = os.path.dirname(os.path.realpath(__file__))
_POLYMER_DIR = os.path.join(_HERE, os.pardir, os.pardir,
'third_party', 'polymer', 'v1_0', 'components-chromium')
def main(argv):
with open(argv[1]) as f:
records = [ParseRecord(r) for r in f if not r.isspace()]
lines = { 'v_1_0': [] }
for path in sorted(set(records), key=SortKey):
full_path = os.path.normpath(os.path.join(_POLYMER_DIR, path))
if not os.path.exists(full_path):
raise FileNotFoundException('%s not found' % full_path)
template = DEFINITION_TEMPLATE_1_0
lines['v_1_0'].append(
template % (PathToGritId(path), path))
print FILE_TEMPLATE % { 'v_1_0': '\n'.join(lines['v_1_0']) }
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause |
spennihana/h2o-3 | h2o-py/tests/testdir_misc/pyunit_upload_file.py | 8 | 1807 | from __future__ import print_function
from builtins import zip
import sys
sys.path.insert(1,"../../")
import h2o
from tests import pyunit_utils
def upload_file():
a = h2o.upload_file(pyunit_utils.locate("smalldata/logreg/prostate.csv"))
print(a.describe())
from h2o import H2OFrame
# using lists []
py_list_to_h2o = H2OFrame([[0, 1, 2, 3, 4]])
print(py_list_to_h2o.describe())
py_list_to_h2o_2 = H2OFrame([[0, 1, 2, 3], [5, 6, "hi", "dog"]])
print(py_list_to_h2o_2.describe())
# using tuples ()
py_tuple_to_h2o = H2OFrame([(0, 1, 2, 3, 4)])
print(py_tuple_to_h2o.describe())
py_tuple_to_h2o_2 = H2OFrame(((0, 1, 2, 3), (5, 6, "hi", "dog")))
print(py_tuple_to_h2o_2.describe())
# using dicts {}
py_dict_to_h2o = H2OFrame({"column1": [5, 4, 3, 2, 1],
"column2": (1, 2, 3, 4, 5)})
py_dict_to_h2o.describe()
py_dict_to_h2o_2 = H2OFrame({"colA": ["bilbo", "baggins"], "colB": ["meow"]})
print(py_dict_to_h2o_2.describe())
# using collections.OrderedDict
import collections
d = {"colA": ["bilbo", "baggins"], "colB": ["meow"]} # still unordered!
py_ordered_dict_to_h2o = H2OFrame(collections.OrderedDict(d))
py_ordered_dict_to_h2o.describe()
# make an ordered dictionary!
d2 = collections.OrderedDict()
d2["colA"] = ["bilbo", "baggins"]
d2["colB"] = ["meow"]
py_ordered_dict_to_h2o_2 = H2OFrame(collections.OrderedDict(d2))
py_ordered_dict_to_h2o_2.describe()
# numpy.array
# import numpy as np
#
# py_numpy_ary_to_h2o = H2OFrame(np.ones((50, 100), dtype=int))
#
# py_numpy_ary_to_h2o.describe()
if __name__ == "__main__":
pyunit_utils.standalone_test(upload_file)
else:
upload_file()
| apache-2.0 |
defcello/PatchCorral | src/data/synthesizers/rolandfantomxr/PRD.py | 4 | 8165 | ####################################################################################################
# Copyright 2013 John Crawford
#
# This file is part of PatchCorral.
#
# PatchCorral is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PatchCorral is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PatchCorral. If not, see <http://www.gnu.org/licenses/>.
####################################################################################################
## @file
# Module Information.
# @date 3/10/2013 Created file. -jc
# @author John Crawford
NAME = 'PR-D'
PATCHES = [
('HPF Sweep', 87, 67, 0, 'TECHNO SYNTH', 'PR-D 001'),
('Moon Synth', 87, 67, 1, 'TECHNO SYNTH', 'PR-D 002'),
('DelyResoSaws', 87, 67, 2, 'TECHNO SYNTH', 'PR-D 003'),
('R-Trance', 87, 67, 3, 'TECHNO SYNTH', 'PR-D 004'),
('Alfa Retro', 87, 67, 4, 'TECHNO SYNTH', 'PR-D 005'),
('Nu Hoover', 87, 67, 5, 'TECHNO SYNTH', 'PR-D 006'),
('Hoovercraft', 87, 67, 6, 'TECHNO SYNTH', 'PR-D 007'),
('Braatz...', 87, 67, 7, 'TECHNO SYNTH', 'PR-D 008'),
('AllinOneRiff', 87, 67, 8, 'TECHNO SYNTH', 'PR-D 009'),
('YZ Again', 87, 67, 9, 'TECHNO SYNTH', 'PR-D 010'),
('Flazzy Lead', 87, 67, 10, 'TECHNO SYNTH', 'PR-D 011'),
('Coffee Bee', 87, 67, 11, 'TECHNO SYNTH', 'PR-D 012'),
('Sweet House', 87, 67, 12, 'TECHNO SYNTH', 'PR-D 013'),
('Alien Bubble', 87, 67, 13, 'TECHNO SYNTH', 'PR-D 014'),
('LowFreqHit', 87, 67, 14, 'TECHNO SYNTH', 'PR-D 015'),
('Loonacy', 87, 67, 15, 'TECHNO SYNTH', 'PR-D 016'),
('Periscope', 87, 67, 16, 'TECHNO SYNTH', 'PR-D 017'),
('Electrostars', 87, 67, 17, 'TECHNO SYNTH', 'PR-D 018'),
('Going Mad!', 87, 67, 18, 'TECHNO SYNTH', 'PR-D 019'),
('LoFiSequence', 87, 67, 19, 'TECHNO SYNTH', 'PR-D 020'),
('DreamInColor', 87, 67, 20, 'TECHNO SYNTH', 'PR-D 021'),
('MelodicDrums', 87, 67, 21, 'TECHNO SYNTH', 'PR-D 022'),
('Techno Snips', 87, 67, 22, 'TECHNO SYNTH', 'PR-D 023'),
('TB Wah', 87, 67, 23, 'TECHNO SYNTH', 'PR-D 024'),
('Waving TB303', 87, 67, 24, 'TECHNO SYNTH', 'PR-D 025'),
('Digi Seq', 87, 67, 25, 'TECHNO SYNTH', 'PR-D 026'),
('Seq Saw', 87, 67, 26, 'TECHNO SYNTH', 'PR-D 027'),
('Reso Seq Saw', 87, 67, 27, 'TECHNO SYNTH', 'PR-D 028'),
('DetuneSeqSaw', 87, 67, 28, 'TECHNO SYNTH', 'PR-D 029'),
('Technotribe', 87, 67, 29, 'TECHNO SYNTH', 'PR-D 030'),
('MetalVoxBox', 87, 67, 30, 'TECHNO SYNTH', 'PR-D 031'),
('Teethy Grit', 87, 67, 31, 'TECHNO SYNTH', 'PR-D 032'),
('Repertition', 87, 67, 32, 'TECHNO SYNTH', 'PR-D 033'),
('Jucy Saw', 87, 67, 33, 'OTHER SYNTH', 'PR-D 034'),
('Cue Tip', 87, 67, 34, 'OTHER SYNTH', 'PR-D 035'),
('TB-Sequence', 87, 67, 35, 'OTHER SYNTH', 'PR-D 036'),
('Europe Xpres', 87, 67, 36, 'OTHER SYNTH', 'PR-D 037'),
('Squeepy', 87, 67, 37, 'OTHER SYNTH', 'PR-D 038'),
('Atmorave', 87, 67, 38, 'OTHER SYNTH', 'PR-D 039'),
('DOC Stack', 87, 67, 39, 'OTHER SYNTH', 'PR-D 040'),
('Sweep Lead', 87, 67, 40, 'OTHER SYNTH', 'PR-D 041'),
('Digitaless', 87, 67, 41, 'OTHER SYNTH', 'PR-D 042'),
('Flip Pad', 87, 67, 42, 'OTHER SYNTH', 'PR-D 043'),
('Short Detune', 87, 67, 43, 'OTHER SYNTH', 'PR-D 044'),
('forSequence', 87, 67, 44, 'OTHER SYNTH', 'PR-D 045'),
('Memory Pluck', 87, 67, 45, 'OTHER SYNTH', 'PR-D 046'),
('Metalic Bass', 87, 67, 46, 'OTHER SYNTH', 'PR-D 047'),
('Aqua', 87, 67, 47, 'OTHER SYNTH', 'PR-D 048'),
('Big Planet', 87, 67, 48, 'OTHER SYNTH', 'PR-D 049'),
('Wet Atax', 87, 67, 49, 'OTHER SYNTH', 'PR-D 050'),
('Houze Clavi', 87, 67, 50, 'OTHER SYNTH', 'PR-D 051'),
('SuperSawSlow', 87, 67, 51, 'OTHER SYNTH', 'PR-D 052'),
('TranceSaws', 87, 67, 52, 'OTHER SYNTH', 'PR-D 053'),
('Trancy Synth', 87, 67, 53, 'OTHER SYNTH', 'PR-D 054'),
('Saw Stack', 87, 67, 54, 'OTHER SYNTH', 'PR-D 055'),
('Frgile Saws', 87, 67, 55, 'OTHER SYNTH', 'PR-D 056'),
('Steamed Sawz', 87, 67, 56, 'OTHER SYNTH', 'PR-D 057'),
('RAVtune', 87, 67, 57, 'OTHER SYNTH', 'PR-D 058'),
('Bustranza', 87, 67, 58, 'OTHER SYNTH', 'PR-D 059'),
('AftTch Ji-n', 87, 67, 59, 'OTHER SYNTH', 'PR-D 060'),
('JP OctAttack', 87, 67, 60, 'OTHER SYNTH', 'PR-D 061'),
('Oct Unison', 87, 67, 61, 'OTHER SYNTH', 'PR-D 062'),
('Xtatic', 87, 67, 62, 'OTHER SYNTH', 'PR-D 063'),
('Dirty Combo', 87, 67, 63, 'OTHER SYNTH', 'PR-D 064'),
('FM\'s Attack', 87, 67, 64, 'OTHER SYNTH', 'PR-D 065'),
('Impression', 87, 67, 65, 'OTHER SYNTH', 'PR-D 066'),
('Digi-vox Syn', 87, 67, 66, 'OTHER SYNTH', 'PR-D 067'),
('Fairy Factor', 87, 67, 67, 'OTHER SYNTH', 'PR-D 068'),
('Tempest', 87, 67, 68, 'OTHER SYNTH', 'PR-D 069'),
('X-Racer', 87, 67, 69, 'OTHER SYNTH', 'PR-D 070'),
('TB Booster', 87, 67, 70, 'OTHER SYNTH', 'PR-D 071'),
('Syn-Orch/Mod', 87, 67, 71, 'OTHER SYNTH', 'PR-D 072'),
('Pressyn', 87, 67, 72, 'OTHER SYNTH', 'PR-D 073'),
('High Five', 87, 67, 73, 'OTHER SYNTH', 'PR-D 074'),
('4DaCommonMan', 87, 67, 74, 'OTHER SYNTH', 'PR-D 075'),
('Orgaenia', 87, 67, 75, 'OTHER SYNTH', 'PR-D 076'),
('Sleeper', 87, 67, 76, 'OTHER SYNTH', 'PR-D 077'),
('Sugar Synth', 87, 67, 77, 'OTHER SYNTH', 'PR-D 078'),
('Ice Palace', 87, 67, 78, 'OTHER SYNTH', 'PR-D 079'),
('Story Harp', 87, 67, 79, 'OTHER SYNTH', 'PR-D 080'),
('LostParadise', 87, 67, 80, 'OTHER SYNTH', 'PR-D 081'),
('Magnetic 5th', 87, 67, 81, 'OTHER SYNTH', 'PR-D 082'),
('Jazz Doos', 87, 67, 82, 'VOX', 'PR-D 083'),
('Beat Vox', 87, 67, 83, 'VOX', 'PR-D 084'),
('Scat Beats', 87, 67, 84, 'VOX', 'PR-D 085'),
('Choir Aahs 1', 87, 67, 85, 'VOX', 'PR-D 086'),
('Choir Aahs 2', 87, 67, 86, 'VOX', 'PR-D 087'),
('ChoirOoh/Aft', 87, 67, 87, 'VOX', 'PR-D 088'),
('Angels Choir', 87, 67, 88, 'VOX', 'PR-D 089'),
('Angelique', 87, 67, 89, 'VOX', 'PR-D 090'),
('Gospel Oohs', 87, 67, 90, 'VOX', 'PR-D 091'),
('Uhmmm', 87, 67, 91, 'VOX', 'PR-D 092'),
('Aah Vox', 87, 67, 92, 'VOX', 'PR-D 093'),
('Morning Star', 87, 67, 93, 'VOX', 'PR-D 094'),
('Syn Opera', 87, 67, 94, 'VOX', 'PR-D 095'),
('BeautifulOne', 87, 67, 95, 'VOX', 'PR-D 096'),
('Ooze', 87, 67, 96, 'VOX', 'PR-D 097'),
('Aerial Choir', 87, 67, 97, 'VOX', 'PR-D 098'),
('3D Vox', 87, 67, 98, 'VOX', 'PR-D 099'),
('FS Sqr Pad', 87, 67, 99, 'SOFT PAD', 'PR-D 100'),
('FS Hollow', 87, 67, 100, 'SOFT PAD', 'PR-D 101'),
('Silk Pad', 87, 67, 101, 'SOFT PAD', 'PR-D 102'),
('WarmReso Pad', 87, 67, 102, 'SOFT PAD', 'PR-D 103'),
('FS Soft Pad', 87, 67, 103, 'SOFT PAD', 'PR-D 104'),
('Soft Breeze', 87, 67, 104, 'SOFT PAD', 'PR-D 105'),
('JP Strings 1', 87, 67, 105, 'SOFT PAD', 'PR-D 106'),
('JP Strings 2', 87, 67, 106, 'SOFT PAD', 'PR-D 107'),
('FS Syn Str', 87, 67, 107, 'SOFT PAD', 'PR-D 108'),
('Syn Strings', 87, 67, 108, 'SOFT PAD', 'PR-D 109'),
('OB Slow Str', 87, 67, 109, 'SOFT PAD', 'PR-D 110'),
('Super SynStr', 87, 67, 110, 'SOFT PAD', 'PR-D 111'),
('Strings Pad', 87, 67, 111, 'SOFT PAD', 'PR-D 112'),
('R&B SoftPad', 87, 67, 112, 'SOFT PAD', 'PR-D 113'),
('Reso Pad', 87, 67, 113, 'SOFT PAD', 'PR-D 114'),
('Phat Pad', 87, 67, 114, 'SOFT PAD', 'PR-D 115'),
('FS PhaserPad', 87, 67, 115, 'SOFT PAD', 'PR-D 116'),
('Mystic Str', 87, 67, 116, 'SOFT PAD', 'PR-D 117'),
('Glass Organ', 87, 67, 117, 'SOFT PAD', 'PR-D 118'),
('Wind Pad', 87, 67, 118, 'SOFT PAD', 'PR-D 119'),
('Combination', 87, 67, 119, 'SOFT PAD', 'PR-D 120'),
('HumanKindnes', 87, 67, 120, 'SOFT PAD', 'PR-D 121'),
('Atmospherics', 87, 67, 121, 'SOFT PAD', 'PR-D 122'),
('Terra Nostra', 87, 67, 122, 'SOFT PAD', 'PR-D 123'),
('OB Aaahs', 87, 67, 123, 'SOFT PAD', 'PR-D 124'),
('Vulcano Pad', 87, 67, 124, 'SOFT PAD', 'PR-D 125'),
('Cloud #9', 87, 67, 125, 'SOFT PAD', 'PR-D 126'),
('Lostscapes', 87, 67, 126, 'SOFT PAD', 'PR-D 127'),
('Organic Pad', 87, 67, 127, 'SOFT PAD', 'PR-D 128'),
]
| gpl-3.0 |
erijo/sunnyportal-py | bin/testclient.py | 1 | 2557 | #!/usr/bin/env python3
# Copyright (c) 2016 Erik Johansson <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
from datetime import date
from getpass import getpass
import configparser
import logging
import sys
import sunnyportal.client
def main():
logging.basicConfig(
format="%(asctime)s %(levelname)s: %(message)s", level=logging.DEBUG
)
if len(sys.argv) != 2:
print("Usage: %s <config>" % sys.argv[0])
sys.exit(1)
section = "sunnyportal"
config = configparser.ConfigParser()
config[section] = {}
config.read(sys.argv[1])
if not config[section].get("email"):
config[section]["email"] = input("E-mail: ")
if not config[section].get("password"):
config[section]["password"] = getpass()
with open(sys.argv[1], "w") as f:
config.write(f)
client = sunnyportal.client.Client(
config[section]["email"], config[section]["password"]
)
for plant in client.get_plants():
logging.info("Found plant %s", plant.name)
# plant.profile()
# plant.year_energy_balance(date(2020,4,1))
# plant.month_energy_balance(date(2020,4,1))
# plant.last_data_exact(date.today())
# for device in plant.get_devices():
# for name, param in device.get_parameters().parameters.items():
# print(f"{name} = {param.value} (changed {param.changed})")
# plant.all_data('year')
# plant.all_data('month')
# plant.day_overview(date(2016, 2, 3))
# plant.day_overview(date(2016, 2, 3), quarter=False)
# plant.month_overview(date(2016, 1, 1))
# plant.year_overview(date(2016, 2, 1))
# for entry in plant.logbook(date(2016, 2, 1)).entries:
# print(f"{entry['date']} | {entry['type']} | {entry['description']}")
client.logout()
if __name__ == "__main__":
main()
| gpl-3.0 |
CJ8664/servo | tests/wpt/web-platform-tests/webdriver/tests/actions/mouse.py | 5 | 4724 | import pytest
from tests.support.inline import inline
from tests.actions.support.refine import get_events, filter_dict
from tests.support.wait import wait
def link_doc(dest):
content = "<a href=\"{}\" id=\"link\">destination</a>".format(dest)
return inline(content)
def get_center(rect):
return {
"x": rect["width"] / 2 + rect["x"],
"y": rect["height"] / 2 + rect["y"],
}
# TODO use pytest.approx once we upgrade to pytest > 3.0
def approx(n, m, tolerance=1):
return abs(n - m) <= tolerance
def test_click_at_coordinates(session, test_actions_page, mouse_chain):
div_point = {
"x": 82,
"y": 187,
}
mouse_chain \
.pointer_move(div_point["x"], div_point["y"], duration=1000) \
.click() \
.perform()
events = get_events(session)
assert len(events) == 4
for e in events:
if e["type"] != "mousemove":
assert e["pageX"] == div_point["x"]
assert e["pageY"] == div_point["y"]
assert e["target"] == "outer"
if e["type"] != "mousedown":
assert e["buttons"] == 0
assert e["button"] == 0
expected = [
{"type": "mousedown", "buttons": 1},
{"type": "mouseup", "buttons": 0},
{"type": "click", "buttons": 0},
]
filtered_events = [filter_dict(e, expected[0]) for e in events]
assert expected == filtered_events[1:]
def test_context_menu_at_coordinates(session, test_actions_page, mouse_chain):
div_point = {
"x": 82,
"y": 187,
}
mouse_chain \
.pointer_move(div_point["x"], div_point["y"]) \
.pointer_down(button=2) \
.pointer_up(button=2) \
.perform()
events = get_events(session)
expected = [
{"type": "mousedown", "button": 2},
{"type": "contextmenu", "button": 2},
]
assert len(events) == 4
filtered_events = [filter_dict(e, expected[0]) for e in events]
mousedown_contextmenu_events = [
x for x in filtered_events
if x["type"] in ["mousedown", "contextmenu"]
]
assert expected == mousedown_contextmenu_events
def test_click_element_center(session, test_actions_page, mouse_chain):
outer = session.find.css("#outer", all=False)
center = get_center(outer.rect)
mouse_chain.click(element=outer).perform()
events = get_events(session)
assert len(events) == 4
event_types = [e["type"] for e in events]
assert ["mousemove", "mousedown", "mouseup", "click"] == event_types
for e in events:
if e["type"] != "mousemove":
assert approx(e["pageX"], center["x"])
assert approx(e["pageY"], center["y"])
assert e["target"] == "outer"
def test_click_navigation(session, url):
destination = url("/webdriver/tests/actions/support/test_actions_wdspec.html")
start = link_doc(destination)
def click(link):
mouse_chain = session.actions.sequence(
"pointer", "pointer_id", {"pointerType": "mouse"})
mouse_chain.click(element=link).perform()
session.url = start
error_message = "Did not navigate to %s" % destination
click(session.find.css("#link", all=False))
wait(session, lambda s: s.url == destination, error_message)
# repeat steps to check behaviour after document unload
session.url = start
click(session.find.css("#link", all=False))
wait(session, lambda s: s.url == destination, error_message)
@pytest.mark.parametrize("drag_duration", [0, 300, 800])
@pytest.mark.parametrize("dx, dy",
[(20, 0), (0, 15), (10, 15), (-20, 0), (10, -15), (-10, -15)])
def test_drag_and_drop(session, test_actions_page, mouse_chain, dx, dy, drag_duration):
drag_target = session.find.css("#dragTarget", all=False)
initial_rect = drag_target.rect
initial_center = get_center(initial_rect)
# Conclude chain with extra move to allow time for last queued
# coordinate-update of drag_target and to test that drag_target is "dropped".
mouse_chain \
.pointer_move(0, 0, origin=drag_target) \
.pointer_down() \
.pointer_move(dx, dy, duration=drag_duration, origin="pointer") \
.pointer_up() \
.pointer_move(80, 50, duration=100, origin="pointer") \
.perform()
# mouseup that ends the drag is at the expected destination
e = get_events(session)[1]
assert e["type"] == "mouseup"
assert approx(e["pageX"], initial_center["x"] + dx)
assert approx(e["pageY"], initial_center["y"] + dy)
# check resulting location of the dragged element
final_rect = drag_target.rect
assert initial_rect["x"] + dx == final_rect["x"]
assert initial_rect["y"] + dy == final_rect["y"]
| mpl-2.0 |
onceuponatimeforever/oh-mainline | vendor/packages/django-tastypie/tastypie/management/commands/backfill_api_keys.py | 27 | 1076 | from __future__ import print_function
from __future__ import unicode_literals
from django.core.management.base import NoArgsCommand
from tastypie.compat import User
from tastypie.models import ApiKey
class Command(NoArgsCommand):
help = "Goes through all users and adds API keys for any that don't have one."
def handle_noargs(self, **options):
"""Goes through all users and adds API keys for any that don't have one."""
self.verbosity = int(options.get('verbosity', 1))
for user in User.objects.all().iterator():
try:
api_key = ApiKey.objects.get(user=user)
if not api_key.key:
# Autogenerate the key.
api_key.save()
if self.verbosity >= 1:
print(u"Generated a new key for '%s'" % user.username)
except ApiKey.DoesNotExist:
api_key = ApiKey.objects.create(user=user)
if self.verbosity >= 1:
print(u"Created a new key for '%s'" % user.username)
| agpl-3.0 |
Serenytics/mrq | tests/conftest.py | 1 | 9561 | from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import range
from builtins import str
from builtins import object
from past.builtins import basestring
import pytest
import os
try:
import subprocess32 as subprocess
except:
import subprocess
import sys
import psutil
import time
import re
import json
import urllib.request, urllib.error, urllib.parse
sys.path.append(os.getcwd())
from mrq.job import Job, queue_raw_jobs, queue_jobs
from mrq.queue import Queue
from mrq.config import get_config
from mrq.utils import wait_for_net_service
from mrq.context import connections, set_current_config
set_current_config(get_config(sources=("env")))
os.system("rm -rf dump.rdb")
class ProcessFixture(object):
def __init__(self, request, cmdline=None, wait_port=None, quiet=False):
self.request = request
self.cmdline = cmdline
self.process = None
self.wait_port = wait_port
self.quiet = quiet
self.stopped = False
self.request.addfinalizer(self.stop)
def start(self, cmdline=None, env=None, expected_children=0):
self.stopped = False
self.process_children = []
if not cmdline:
cmdline = self.cmdline
if env is None:
env = {}
# Kept from parent env
for env_key in ["PATH", "GEVENT_LOOP", "VIRTUAL_ENV"]:
if os.environ.get(env_key) and not env.get(env_key):
env[env_key] = os.environ.get(env_key)
if self.quiet:
stdout = open(os.devnull, 'w')
else:
stdout = None
self.cmdline = cmdline
# print cmdline
self.process = subprocess.Popen(re.split(r"\s+", cmdline) if isinstance(cmdline, basestring) else cmdline,
shell=False, close_fds=True, env=env, cwd=os.getcwd(), stdout=stdout)
if self.quiet:
stdout.close()
# Wait for children to start
if expected_children > 0:
psutil_process = psutil.Process(self.process.pid)
# print "Expecting %s children, got %s" % (expected_children,
# psutil_process.get_children(recursive=False))
while True:
self.process_children = psutil_process.get_children(
recursive=True)
if len(self.process_children) >= expected_children:
break
time.sleep(0.1)
if self.wait_port:
wait_for_net_service("127.0.0.1", int(self.wait_port), poll_interval=0.01)
def stop(self, force=False, timeout=None, block=True, sig=15):
# Call this only one time.
if self.stopped and not force:
return
self.stopped = True
if self.process is not None:
os.kill(self.process.pid, sig)
# When sending a sigkill to the process, we also want to kill the
# children in case of supervisord usage
if sig == 9 and len(self.process_children) > 0:
for c in self.process_children:
c.send_signal(sig)
if not block:
return
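        # Poll for up to 20 seconds (2000 * 10 ms) until the process has exited
        # or turned into a zombie.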
for _ in range(2000):
try:
p = psutil.Process(self.process.pid)
if p.status == "zombie":
# print "process %s zombie OK" % self.cmdline
return
except psutil.NoSuchProcess:
# print "process %s exit OK" % self.cmdline
return
time.sleep(0.01)
assert False, "Process '%s' was still in state %s after 20 seconds..." % (
self.cmdline, p.status)
class WorkerFixture(ProcessFixture):
def __init__(self, request, **kwargs):
ProcessFixture.__init__(self, request, cmdline=kwargs.get("cmdline"))
self.fixture_mongodb = kwargs["mongodb"]
self.fixture_redis = kwargs["redis"]
self.started = False
def start(self, flush=True, deps=True, trace=True, **kwargs):
self.started = True
if deps:
self.start_deps(flush=flush)
processes = 0
m = re.search(r"--processes (\d+)", kwargs.get("flags", ""))
if m:
processes = int(m.group(1))
cmdline = "python mrq/bin/mrq_worker.py --mongodb_logs_size 0 %s %s %s %s" % (
"--admin_port 20020" if (processes <= 1) else "",
"--trace_io --trace_greenlets" if trace else "",
kwargs.get("flags", ""),
kwargs.get("queues", "high default low")
)
# +1 because of supervisord itself
if processes > 0:
processes += 1
print(cmdline)
ProcessFixture.start(self, cmdline=cmdline, env=kwargs.get("env"), expected_children=processes)
def start_deps(self, flush=True):
self.fixture_mongodb.start()
self.fixture_redis.start()
# Will auto-connect
connections.reset()
self.mongodb_jobs = connections.mongodb_jobs
self.mongodb_logs = connections.mongodb_logs
self.redis = connections.redis
if flush:
self.fixture_mongodb.flush()
self.fixture_redis.flush()
def stop(self, deps=True, sig=2, **kwargs):
if self.started:
ProcessFixture.stop(self, sig=sig, **kwargs)
if deps:
self.stop_deps(**kwargs)
def stop_deps(self, **kwargs):
self.fixture_mongodb.stop(sig=2, **kwargs)
self.fixture_redis.stop(sig=2, **kwargs)
def wait_for_tasks_results(self, job_ids, block=True, accept_statuses=["success"]):
if not block:
return job_ids
results = []
for job_id in job_ids:
job = Job(job_id).wait(poll_interval=0.01)
assert job.get("status") in accept_statuses, "Job had status %s, not in %s. Dump: %s" % (
job.get("status"), accept_statuses, job)
results.append(job.get("result"))
return results
def send_raw_tasks(self, queue, params_list, start=True, block=True):
if not self.started and start:
self.start()
queue_raw_jobs(queue, params_list)
if block:
# Wait for the queue to be empty. Might be error-prone when tasks
# are in-memory between the 2
q = Queue(queue)
while q.size() > 0 or self.mongodb_jobs.mrq_jobs.find({"status": "started"}).count() > 0:
# print "S", q.size(),
# self.mongodb_jobs.mrq_jobs.find({"status":
# "started"}).count()
time.sleep(0.1)
def send_tasks(self, path, params_list, block=True, queue=None, accept_statuses=["success"], start=True):
if not self.started and start:
self.start()
job_ids = queue_jobs(path, params_list, queue=queue)
return self.wait_for_tasks_results(job_ids, block=block, accept_statuses=accept_statuses)
def send_task(self, path, params, **kwargs):
return self.send_tasks(path, [params], **kwargs)[0]
def send_task_cli(self, path, params, queue=None, **kwargs):
cli = ["python", "mrq/bin/mrq_run.py", "--quiet"]
if queue:
cli += ["--queue", queue]
cli += [path, json.dumps(params)]
out = subprocess.check_output(cli).strip()
if not queue:
return json.loads(out.decode('utf-8'))
return out
def get_report(self, with_memory=False):
wait_for_net_service("127.0.0.1", 20020, poll_interval=0.01)
f = urllib.request.urlopen("http://127.0.0.1:20020/report%s" % ("_mem" if with_memory else ""))
data = json.loads(f.read().decode('utf-8'))
f.close()
return data
class RedisFixture(ProcessFixture):
def flush(self):
connections.redis.flushall()
# Empty local known_queues cache too
Queue.known_queues = {}
class MongoFixture(ProcessFixture):
def flush(self):
for mongodb in (connections.mongodb_jobs, connections.mongodb_logs):
if mongodb:
for c in mongodb.collection_names():
if not c.startswith("system."):
mongodb.drop_collection(c)
@pytest.fixture(scope="function")
def httpstatic(request):
return ProcessFixture(request, "/usr/sbin/nginx -c /app/tests/fixtures/httpstatic/nginx.conf", wait_port=8081)
@pytest.fixture(scope="function")
def mongodb(request):
cmd = "mongod --smallfiles --noprealloc --nojournal"
if os.environ.get("STACK_STARTED"):
cmd = "sleep 1h"
return MongoFixture(request, cmd, wait_port=27017, quiet=True)
@pytest.fixture(scope="function")
def mongodb_with_journal(request):
cmd = "mongod --smallfiles --noprealloc"
if os.environ.get("STACK_STARTED"):
cmd = "sleep 1h"
return MongoFixture(request, cmd, wait_port=27017, quiet=True)
@pytest.fixture(scope="function")
def redis(request):
cmd = "redis-server"
if os.environ.get("STACK_STARTED"):
cmd = "sleep 1h"
return RedisFixture(request, cmd, wait_port=6379, quiet=True)
@pytest.fixture(scope="function")
def worker(request, mongodb, redis):
return WorkerFixture(request, mongodb=mongodb, redis=redis)
@pytest.fixture(scope="function")
def worker_mongodb_with_journal(request, mongodb_with_journal, redis):
return WorkerFixture(request, mongodb=mongodb_with_journal, redis=redis)
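# Illustrative sketch (not part of the original fixtures): a test built on these
# fixtures typically receives `worker` as an argument and drives it with send_task();
# the task path and parameters below are assumed placeholders.
def _example_worker_usage(worker):
    result = worker.send_task("tests.tasks.general.Add", {"a": 40, "b": 2})
    assert result == 42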
| mit |
pombredanne/PyMISP | examples/search.py | 2 | 1727 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import os
import json
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
def search(m, quiet, url, controller, out=None, **kwargs):
result = m.search(controller, **kwargs)
if quiet:
for e in result['response']:
print('{}{}{}\n'.format(url, '/events/view/', e['Event']['id']))
elif out is None:
for e in result['response']:
print(json.dumps(e) + '\n')
else:
with open(out, 'w') as f:
for e in result['response']:
f.write(json.dumps(e) + '\n')
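# Illustrative sketch (not part of the original example): search() can also be driven
# programmatically once a PyMISP connection exists; the category value below is only
# an assumed placeholder.
def example_search_by_category(misp_instance, base_url):
    # Print the URL of every matching event instead of writing them to a file.
    search(misp_instance, True, base_url, 'events', out=None, category='Payload delivery')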
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Get all the events matching a value for a given param.')
parser.add_argument("-p", "--param", required=True, help="Parameter to search (e.g. category, org, etc.)")
parser.add_argument("-s", "--search", required=True, help="String to search.")
parser.add_argument("-a", "--attributes", action='store_true', help="Search attributes instead of events")
parser.add_argument("-q", "--quiet", action='store_true', help="Only display URLs to MISP")
parser.add_argument("-o", "--output", help="Output file")
args = parser.parse_args()
if args.output is not None and os.path.exists(args.output):
print('Output file already exists, abort.')
exit(0)
misp = init(misp_url, misp_key)
kwargs = {args.param: args.search}
if args.attributes:
        controller = 'attributes'
    else:
        controller = 'events'
search(misp, args.quiet, misp_url, controller, args.output, **kwargs)
| bsd-2-clause |
matrixise/odoo | addons/portal_project_issue/tests/__init__.py | 167 | 1124 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2013-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_access_rights
checks = [
test_access_rights,
]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
direvus/ansible | test/units/modules/cloud/amazon/test_aws_direct_connect_connection.py | 81 | 4165 | # (c) 2017 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from units.utils.amazon_placebo_fixtures import placeboify, maybe_sleep
from ansible.modules.cloud.amazon import aws_direct_connect_connection
from ansible.module_utils.ec2 import get_aws_connection_info, boto3_conn
class FakeModule(object):
def __init__(self, **kwargs):
self.params = kwargs
def fail_json(self, *args, **kwargs):
self.exit_args = args
self.exit_kwargs = kwargs
raise Exception('FAIL')
def exit_json(self, *args, **kwargs):
self.exit_args = args
self.exit_kwargs = kwargs
# When rerecording these tests, create a stand alone connection with default values in us-west-2
# with the name ansible-test-connection and set connection_id to the appropriate value
connection_id = "dxcon-fgq9rgot"
connection_name = 'ansible-test-connection'
def test_connection_status(placeboify, maybe_sleep):
client = placeboify.client('directconnect')
status = aws_direct_connect_connection.connection_status(client, connection_id)['connection']
assert status['connectionName'] == connection_name
assert status['connectionId'] == connection_id
def test_connection_exists_by_id(placeboify, maybe_sleep):
client = placeboify.client('directconnect')
exists = aws_direct_connect_connection.connection_exists(client, connection_id)
assert exists == connection_id
def test_connection_exists_by_name(placeboify, maybe_sleep):
client = placeboify.client('directconnect')
exists = aws_direct_connect_connection.connection_exists(client, None, connection_name)
assert exists == connection_id
def test_connection_does_not_exist(placeboify, maybe_sleep):
client = placeboify.client('directconnect')
exists = aws_direct_connect_connection.connection_exists(client, 'dxcon-notthere')
assert exists is False
def test_changed_properties(placeboify, maybe_sleep):
client = placeboify.client('directconnect')
status = aws_direct_connect_connection.connection_status(client, connection_id)['connection']
location = "differentlocation"
bandwidth = status['bandwidth']
assert aws_direct_connect_connection.changed_properties(status, location, bandwidth) is True
def test_associations_are_not_updated(placeboify, maybe_sleep):
client = placeboify.client('directconnect')
status = aws_direct_connect_connection.connection_status(client, connection_id)['connection']
lag_id = status.get('lagId')
assert aws_direct_connect_connection.update_associations(client, status, connection_id, lag_id) is False
def test_create_and_delete(placeboify, maybe_sleep):
client = placeboify.client('directconnect')
created_conn = verify_create_works(placeboify, maybe_sleep, client)
deleted_conn = verify_delete_works(placeboify, maybe_sleep, client, created_conn)
def verify_create_works(placeboify, maybe_sleep, client):
created = aws_direct_connect_connection.create_connection(client=client,
location="EqSE2",
bandwidth="1Gbps",
name="ansible-test-2",
lag_id=None)
assert created.startswith('dxcon')
return created
def verify_delete_works(placeboify, maybe_sleep, client, conn_id):
changed = aws_direct_connect_connection.ensure_absent(client, conn_id)
assert changed is True
| gpl-3.0 |
mavit/ansible | lib/ansible/modules/network/iosxr/iosxr_interface.py | 24 | 26061 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: iosxr_interface
version_added: "2.4"
author:
- "Ganesh Nalawade (@ganeshrn)"
- "Kedar Kekan (@kedarX)"
short_description: Manage Interface on Cisco IOS XR network devices
description:
- This module provides declarative management of Interfaces
on Cisco IOS XR network devices.
extends_documentation_fragment: iosxr
notes:
- Tested against IOS XRv 6.1.2
- Preconfiguration of physical interfaces is not supported with C(netconf) transport.
options:
name:
description:
- Name of the interface to configure in C(type + path) format. e.g. C(GigabitEthernet0/0/0/0)
required: true
description:
description:
- Description of Interface being configured.
enabled:
description:
- Removes the shutdown configuration, which removes the forced administrative down on the interface,
enabling it to move to an up or down state.
type: bool
default: True
active:
description:
- Whether the interface is C(active) or C(preconfigured). Preconfiguration allows you to configure modular
services cards before they are inserted into the router. When the cards are inserted, they are instantly
configured. Active cards are the ones already inserted.
choices: ['active', 'preconfigure']
default: active
version_added: 2.5
speed:
description:
- Configure the speed for an interface. Default is auto-negotiation when not configured.
choices: ['10', '100', '1000']
mtu:
description:
      - Sets the MTU value for the interface. Range is between 64 and 65535
duplex:
description:
- Configures the interface duplex mode. Default is auto-negotiation when not configured.
choices: ['full', 'half']
tx_rate:
description:
- Transmit rate in bits per second (bps).
- This is state check parameter only.
- Supports conditionals, see L(Conditionals in Networking Modules,../network/user_guide/network_working_with_command_output.html)
rx_rate:
description:
- Receiver rate in bits per second (bps).
- This is state check parameter only.
- Supports conditionals, see L(Conditionals in Networking Modules,../network/user_guide/network_working_with_command_output.html)
aggregate:
description:
- List of Interface definitions. Include multiple interface configurations together,
one each on a separate line
delay:
description:
- Time in seconds to wait before checking for the operational state on remote
device. This wait is applicable for operational state argument which are
I(state) with values C(up)/C(down), I(tx_rate) and I(rx_rate).
default: 10
state:
description:
- State of the Interface configuration, C(up) means present and
operationally up and C(down) means present and operationally C(down)
default: present
choices: ['present', 'absent', 'up', 'down']
"""
EXAMPLES = """
- name: configure interface
iosxr_interface:
name: GigabitEthernet0/0/0/2
description: test-interface
speed: 100
duplex: half
mtu: 512
- name: remove interface
iosxr_interface:
name: GigabitEthernet0/0/0/2
state: absent
- name: make interface up
iosxr_interface:
name: GigabitEthernet0/0/0/2
enabled: True
- name: make interface down
iosxr_interface:
name: GigabitEthernet0/0/0/2
enabled: False
- name: Create interface using aggregate
iosxr_interface:
aggregate:
- name: GigabitEthernet0/0/0/3
- name: GigabitEthernet0/0/0/2
speed: 100
duplex: full
mtu: 512
state: present
- name: Create interface using aggregate along with additional params in aggregate
iosxr_interface:
aggregate:
- { name: GigabitEthernet0/0/0/3, description: test-interface 3 }
- { name: GigabitEthernet0/0/0/2, description: test-interface 2 }
speed: 100
duplex: full
mtu: 512
state: present
- name: Delete interface using aggregate
iosxr_interface:
aggregate:
- name: GigabitEthernet0/0/0/3
- name: GigabitEthernet0/0/0/2
state: absent
- name: Check intent arguments
iosxr_interface:
name: GigabitEthernet0/0/0/5
state: up
delay: 20
- name: Config + intent
iosxr_interface:
name: GigabitEthernet0/0/0/5
enabled: False
state: down
delay: 20
"""
RETURN = """
commands:
description: The list of configuration mode commands sent to device with transport C(cli)
returned: always (empty list when no commands to send)
type: list
sample:
- interface GigabitEthernet0/0/0/2
- description test-interface
- duplex half
- mtu 512
xml:
description: NetConf rpc xml sent to device with transport C(netconf)
returned: always (empty list when no xml rpc to send)
type: list
version_added: 2.5
sample:
- '<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<interface-configurations xmlns="http://cisco.com/ns/yang/Cisco-IOS-XR-ifmgr-cfg">
<interface-configuration xc:operation="merge">
<active>act</active>
<interface-name>GigabitEthernet0/0/0/0</interface-name>
<description>test-interface-0</description>
<mtus><mtu>
<owner>GigabitEthernet</owner>
<mtu>512</mtu>
</mtu></mtus>
<ethernet xmlns="http://cisco.com/ns/yang/Cisco-IOS-XR-drivers-media-eth-cfg">
<speed>100</speed>
<duplex>half</duplex>
</ethernet>
</interface-configuration>
</interface-configurations></config>'
"""
import re
from time import sleep
from copy import deepcopy
import collections
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.iosxr.iosxr import get_config, load_config, build_xml
from ansible.module_utils.network.iosxr.iosxr import run_commands, iosxr_argument_spec, get_oper
from ansible.module_utils.network.iosxr.iosxr import is_netconf, is_cliconf, etree_findall, etree_find
from ansible.module_utils.network.common.utils import conditional, remove_default_spec
def validate_mtu(value):
if value and not 64 <= int(value) <= 65535:
return False, 'mtu must be between 64 and 65535'
return True, None
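# Illustrative self-check (not part of the original module): validators return a
# (passed, error_message) pair and are looked up dynamically by
# ConfigBase.validate_param_values() via globals().get('validate_%s' % key).
def _example_validate_mtu():
    assert validate_mtu(512) == (True, None)
    assert validate_mtu(40) == (False, 'mtu must be between 64 and 65535')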
class ConfigBase(object):
def __init__(self, module):
self._module = module
self._result = {'changed': False, 'warnings': []}
self._want = list()
self._have = list()
def validate_param_values(self, param=None):
for key, value in param.items():
# validate the param value (if validator func exists)
validator = globals().get('validate_%s' % key)
if callable(validator):
rc, msg = validator(value)
if not rc:
self._module.fail_json(msg=msg)
def map_params_to_obj(self):
aggregate = self._module.params.get('aggregate')
if aggregate:
for item in aggregate:
for key in item:
if item.get(key) is None:
item[key] = self._module.params[key]
self.validate_param_values(item)
d = item.copy()
match = re.match(r"(^[a-z]+)([0-9/]+$)", d['name'], re.I)
if match:
d['owner'] = match.groups()[0]
if d['active'] == 'preconfigure':
d['active'] = 'pre'
else:
d['active'] = 'act'
self._want.append(d)
else:
self.validate_param_values(self._module.params)
params = {
'name': self._module.params['name'],
'description': self._module.params['description'],
'speed': self._module.params['speed'],
'mtu': self._module.params['mtu'],
'duplex': self._module.params['duplex'],
'state': self._module.params['state'],
'delay': self._module.params['delay'],
'tx_rate': self._module.params['tx_rate'],
'rx_rate': self._module.params['rx_rate'],
'enabled': self._module.params['enabled'],
'active': self._module.params['active'],
}
match = re.match(r"(^[a-z]+)([0-9/]+$)", params['name'], re.I)
if match:
params['owner'] = match.groups()[0]
if params['active'] == 'preconfigure':
params['active'] = 'pre'
else:
params['active'] = 'act'
self._want.append(params)
class CliConfiguration(ConfigBase):
def __init__(self, module):
super(CliConfiguration, self).__init__(module)
def parse_shutdown(self, intf_config):
for cfg in intf_config:
match = re.search(r'%s' % 'shutdown', cfg, re.M)
if match:
return True
return False
def parse_config_argument(self, intf_config, arg):
for cfg in intf_config:
match = re.search(r'%s (.+)$' % arg, cfg, re.M)
if match:
return match.group(1)
def search_obj_in_list(self, name):
for obj in self._have:
if obj['name'] == name:
return obj
return None
def map_config_to_obj(self):
data = get_config(self._module, config_filter='interface')
interfaces = data.strip().rstrip('!').split('!')
if not interfaces:
return list()
for interface in interfaces:
intf_config = interface.strip().splitlines()
name = intf_config[0].strip().split()[1]
active = 'act'
if name == 'preconfigure':
active = 'pre'
name = intf_config[0].strip().split()[2]
obj = {
'name': name,
'description': self.parse_config_argument(intf_config, 'description'),
'speed': self.parse_config_argument(intf_config, 'speed'),
'duplex': self.parse_config_argument(intf_config, 'duplex'),
'mtu': self.parse_config_argument(intf_config, 'mtu'),
'enabled': True if not self.parse_shutdown(intf_config) else False,
'active': active,
'state': 'present'
}
self._have.append(obj)
def map_obj_to_commands(self):
commands = list()
args = ('speed', 'description', 'duplex', 'mtu')
for want_item in self._want:
name = want_item['name']
disable = not want_item['enabled']
state = want_item['state']
obj_in_have = self.search_obj_in_list(name)
interface = 'interface ' + name
if state == 'absent' and obj_in_have:
commands.append('no ' + interface)
elif state in ('present', 'up', 'down'):
if obj_in_have:
for item in args:
candidate = want_item.get(item)
running = obj_in_have.get(item)
if candidate != running:
if candidate:
cmd = interface + ' ' + item + ' ' + str(candidate)
commands.append(cmd)
if disable and obj_in_have.get('enabled', False):
commands.append(interface + ' shutdown')
elif not disable and not obj_in_have.get('enabled', False):
commands.append('no ' + interface + ' shutdown')
else:
for item in args:
value = want_item.get(item)
if value:
commands.append(interface + ' ' + item + ' ' + str(value))
if not disable:
commands.append('no ' + interface + ' shutdown')
self._result['commands'] = commands
if commands:
commit = not self._module.check_mode
diff = load_config(self._module, commands, commit=commit)
if diff:
self._result['diff'] = dict(prepared=diff)
self._result['changed'] = True
def check_declarative_intent_params(self):
failed_conditions = []
for want_item in self._want:
want_state = want_item.get('state')
want_tx_rate = want_item.get('tx_rate')
want_rx_rate = want_item.get('rx_rate')
if want_state not in ('up', 'down') and not want_tx_rate and not want_rx_rate:
continue
if self._result['changed']:
sleep(want_item['delay'])
command = 'show interfaces {!s}'.format(want_item['name'])
out = run_commands(self._module, command)[0]
if want_state in ('up', 'down'):
match = re.search(r'%s (\w+)' % 'line protocol is', out, re.M)
have_state = None
if match:
have_state = match.group(1)
if have_state.strip() == 'administratively':
match = re.search(r'%s (\w+)' % 'administratively', out, re.M)
if match:
have_state = match.group(1)
if have_state is None or not conditional(want_state, have_state.strip()):
failed_conditions.append('state ' + 'eq({!s})'.format(want_state))
if want_tx_rate:
match = re.search(r'%s (\d+)' % 'output rate', out, re.M)
have_tx_rate = None
if match:
have_tx_rate = match.group(1)
if have_tx_rate is None or not conditional(want_tx_rate, have_tx_rate.strip(), cast=int):
failed_conditions.append('tx_rate ' + want_tx_rate)
if want_rx_rate:
match = re.search(r'%s (\d+)' % 'input rate', out, re.M)
have_rx_rate = None
if match:
have_rx_rate = match.group(1)
if have_rx_rate is None or not conditional(want_rx_rate, have_rx_rate.strip(), cast=int):
failed_conditions.append('rx_rate ' + want_rx_rate)
if failed_conditions:
msg = 'One or more conditional statements have not been satisfied'
self._module.fail_json(msg=msg, failed_conditions=failed_conditions)
def run(self):
self.map_params_to_obj()
self.map_config_to_obj()
self.map_obj_to_commands()
self.check_declarative_intent_params()
return self._result
class NCConfiguration(ConfigBase):
def __init__(self, module):
super(NCConfiguration, self).__init__(module)
self._intf_meta = collections.OrderedDict()
self._shut_meta = collections.OrderedDict()
self._data_rate_meta = collections.OrderedDict()
self._line_state_meta = collections.OrderedDict()
def map_obj_to_xml_rpc(self):
self._intf_meta.update([
('interface-configuration', {'xpath': 'interface-configurations/interface-configuration', 'tag': True, 'attrib': 'operation'}),
('a:active', {'xpath': 'interface-configurations/interface-configuration/active', 'operation': 'edit'}),
('a:name', {'xpath': 'interface-configurations/interface-configuration/interface-name'}),
('a:description', {'xpath': 'interface-configurations/interface-configuration/description', 'operation': 'edit'}),
('mtus', {'xpath': 'interface-configurations/interface-configuration/mtus', 'tag': True, 'operation': 'edit'}),
('mtu', {'xpath': 'interface-configurations/interface-configuration/mtus/mtu', 'tag': True, 'operation': 'edit'}),
('a:owner', {'xpath': 'interface-configurations/interface-configuration/mtus/mtu/owner', 'operation': 'edit'}),
('a:mtu', {'xpath': 'interface-configurations/interface-configuration/mtus/mtu/mtu', 'operation': 'edit'}),
('CEthernet', {'xpath': 'interface-configurations/interface-configuration/ethernet', 'tag': True, 'operation': 'edit', 'ns': True}),
('a:speed', {'xpath': 'interface-configurations/interface-configuration/ethernet/speed', 'operation': 'edit'}),
('a:duplex', {'xpath': 'interface-configurations/interface-configuration/ethernet/duplex', 'operation': 'edit'}),
])
self._shut_meta.update([
('interface-configuration', {'xpath': 'interface-configurations/interface-configuration', 'tag': True}),
('a:active', {'xpath': 'interface-configurations/interface-configuration/active', 'operation': 'edit'}),
('a:name', {'xpath': 'interface-configurations/interface-configuration/interface-name'}),
('shutdown', {'xpath': 'interface-configurations/interface-configuration/shutdown', 'tag': True, 'operation': 'edit', 'attrib': 'operation'}),
])
state = self._module.params['state']
_get_filter = build_xml('interface-configurations', xmap=self._intf_meta, params=self._want, opcode="filter")
running = get_config(self._module, source='running', config_filter=_get_filter)
intfcfg_nodes = etree_findall(running, 'interface-configuration')
intf_list = set()
shut_list = set()
for item in intfcfg_nodes:
intf_name = etree_find(item, 'interface-name').text
if intf_name is not None:
intf_list.add(intf_name)
if etree_find(item, 'shutdown') is not None:
shut_list.add(intf_name)
intf_params = list()
shut_params = list()
noshut_params = list()
for index, item in enumerate(self._want):
if item['name'] in intf_list:
intf_params.append(item)
if not item['enabled']:
shut_params.append(item)
if item['name'] in shut_list and item['enabled']:
noshut_params.append(item)
opcode = None
if state == 'absent':
if intf_params:
opcode = "delete"
elif state in ('present', 'up', 'down'):
intf_params = self._want
opcode = 'merge'
self._result['xml'] = []
_edit_filter_list = list()
if opcode:
_edit_filter_list.append(build_xml('interface-configurations', xmap=self._intf_meta,
params=intf_params, opcode=opcode))
if opcode == 'merge':
if len(shut_params):
_edit_filter_list.append(build_xml('interface-configurations', xmap=self._shut_meta,
params=shut_params, opcode='merge'))
if len(noshut_params):
_edit_filter_list.append(build_xml('interface-configurations', xmap=self._shut_meta,
params=noshut_params, opcode='delete'))
diff = None
if len(_edit_filter_list):
commit = not self._module.check_mode
diff = load_config(self._module, _edit_filter_list, commit=commit, running=running,
nc_get_filter=_get_filter)
if diff:
if self._module._diff:
self._result['diff'] = dict(prepared=diff)
self._result['xml'] = _edit_filter_list
self._result['changed'] = True
def check_declarative_intent_params(self):
failed_conditions = []
self._data_rate_meta.update([
('interfaces', {'xpath': 'infra-statistics/interfaces', 'tag': True}),
('interface', {'xpath': 'infra-statistics/interfaces/interface', 'tag': True}),
('a:name', {'xpath': 'infra-statistics/interfaces/interface/interface-name'}),
('cache', {'xpath': 'infra-statistics/interfaces/interface/cache', 'tag': True}),
('data-rate', {'xpath': 'infra-statistics/interfaces/interface/cache/data-rate', 'tag': True}),
('input-data-rate', {'xpath': 'infra-statistics/interfaces/interface/cache/data-rate/input-data-rate', 'tag': True}),
('output-data-rate', {'xpath': 'infra-statistics/interfaces/interface/cache/data-rate/output-data-rate', 'tag': True}),
])
self._line_state_meta.update([
('data-nodes', {'xpath': 'interface-properties/data-nodes', 'tag': True}),
('data-node', {'xpath': 'interface-properties/data-nodes/data-node', 'tag': True}),
('system-view', {'xpath': 'interface-properties/data-nodes/data-node/system-view', 'tag': True}),
('interfaces', {'xpath': 'interface-properties/data-nodes/data-node/system-view/interfaces', 'tag': True}),
('interface', {'xpath': 'interface-properties/data-nodes/data-node/system-view/interfaces/interface', 'tag': True}),
('a:name', {'xpath': 'interface-properties/data-nodes/data-node/system-view/interfaces/interface/interface-name'}),
('line-state', {'xpath': 'interface-properties/data-nodes/data-node/system-view/interfaces/interface/line-state', 'tag': True}),
])
_rate_filter = build_xml('infra-statistics', xmap=self._data_rate_meta, params=self._want, opcode="filter")
out = get_oper(self._module, filter=_rate_filter)
data_rate_list = etree_findall(out, 'interface')
data_rate_map = dict()
for item in data_rate_list:
data_rate_map.update({etree_find(item, 'interface-name').text: dict()})
data_rate_map[etree_find(item, 'interface-name').text].update({'input-data-rate': etree_find(item, 'input-data-rate').text,
'output-data-rate': etree_find(item, 'output-data-rate').text})
_line_state_filter = build_xml('interface-properties', xmap=self._line_state_meta, params=self._want, opcode="filter")
out = get_oper(self._module, filter=_line_state_filter)
line_state_list = etree_findall(out, 'interface')
line_state_map = dict()
for item in line_state_list:
line_state_map.update({etree_find(item, 'interface-name').text: etree_find(item, 'line-state').text})
for want_item in self._want:
want_state = want_item.get('state')
want_tx_rate = want_item.get('tx_rate')
want_rx_rate = want_item.get('rx_rate')
if want_state not in ('up', 'down') and not want_tx_rate and not want_rx_rate:
continue
if self._result['changed']:
sleep(want_item['delay'])
if want_state in ('up', 'down'):
if want_state not in line_state_map[want_item['name']]:
failed_conditions.append('state ' + 'eq({!s})'.format(want_state))
if want_tx_rate:
if want_tx_rate != data_rate_map[want_item['name']]['output-data-rate']:
failed_conditions.append('tx_rate ' + want_tx_rate)
if want_rx_rate:
if want_rx_rate != data_rate_map[want_item['name']]['input-data-rate']:
failed_conditions.append('rx_rate ' + want_rx_rate)
if failed_conditions:
msg = 'One or more conditional statements have not been satisfied'
self._module.fail_json(msg=msg, failed_conditions=failed_conditions)
def run(self):
self.map_params_to_obj()
self.map_obj_to_xml_rpc()
self.check_declarative_intent_params()
return self._result
def main():
""" main entry point for module execution
"""
element_spec = dict(
name=dict(type='str'),
description=dict(type='str'),
speed=dict(choices=['10', '100', '1000']),
mtu=dict(),
duplex=dict(choices=['full', 'half']),
enabled=dict(default=True, type='bool'),
active=dict(default='active', type='str', choices=['active', 'preconfigure']),
tx_rate=dict(),
rx_rate=dict(),
delay=dict(default=10, type='int'),
state=dict(default='present',
choices=['present', 'absent', 'up', 'down'])
)
aggregate_spec = deepcopy(element_spec)
aggregate_spec['name'] = dict(required=True)
# remove default in aggregate spec, to handle common arguments
remove_default_spec(aggregate_spec)
argument_spec = dict(
aggregate=dict(type='list', elements='dict', options=aggregate_spec),
)
argument_spec.update(element_spec)
argument_spec.update(iosxr_argument_spec)
required_one_of = [['name', 'aggregate']]
mutually_exclusive = [['name', 'aggregate']]
module = AnsibleModule(argument_spec=argument_spec,
required_one_of=required_one_of,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
config_object = None
if is_cliconf(module):
module.deprecate("cli support for 'iosxr_interface' is deprecated. Use transport netconf instead",
version='2.9')
config_object = CliConfiguration(module)
elif is_netconf(module):
if module.params['active'] == 'preconfigure':
module.fail_json(msg="Physical interface pre-configuration is not supported with transport 'netconf'")
config_object = NCConfiguration(module)
result = {}
if config_object:
result = config_object.run()
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
Juniper/contrail-dev-neutron | neutron/plugins/hyperv/hyperv_neutron_plugin.py | 7 | 14210 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Cloudbase Solutions SRL
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Alessandro Pilotti, Cloudbase Solutions Srl
from oslo.config import cfg
from neutron.api.v2 import attributes
from neutron.common import exceptions as n_exc
from neutron.common import topics
from neutron.db import agents_db
from neutron.db import db_base_plugin_v2
from neutron.db import external_net_db
from neutron.db import l3_gwmode_db
from neutron.db import portbindings_base
from neutron.db import quota_db # noqa
from neutron.extensions import portbindings
from neutron.extensions import providernet as provider
from neutron.openstack.common import log as logging
from neutron.openstack.common import rpc
from neutron.plugins.common import constants as svc_constants
from neutron.plugins.common import utils as plugin_utils
from neutron.plugins.hyperv import agent_notifier_api
from neutron.plugins.hyperv.common import constants
from neutron.plugins.hyperv import db as hyperv_db
from neutron.plugins.hyperv import rpc_callbacks
DEFAULT_VLAN_RANGES = []
hyperv_opts = [
cfg.StrOpt('tenant_network_type', default='local',
help=_("Network type for tenant networks "
"(local, flat, vlan or none)")),
cfg.ListOpt('network_vlan_ranges',
default=DEFAULT_VLAN_RANGES,
help=_("List of <physical_network>:<vlan_min>:<vlan_max> "
"or <physical_network>")),
]
cfg.CONF.register_opts(hyperv_opts, "HYPERV")
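# Illustrative sketch (not part of the original plugin): the options above are read from
# the [HYPERV] section of the Neutron configuration; the physical network name and VLAN
# range below are assumed placeholder values.
_EXAMPLE_HYPERV_CONF = """
[HYPERV]
tenant_network_type = vlan
network_vlan_ranges = physnet1:1000:2999
"""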
LOG = logging.getLogger(__name__)
class BaseNetworkProvider(object):
def __init__(self):
self._db = hyperv_db.HyperVPluginDB()
def create_network(self, session, attrs):
pass
def delete_network(self, session, binding):
pass
def extend_network_dict(self, network, binding):
pass
class LocalNetworkProvider(BaseNetworkProvider):
def create_network(self, session, attrs):
network_type = attrs.get(provider.NETWORK_TYPE)
segmentation_id = attrs.get(provider.SEGMENTATION_ID)
if attributes.is_attr_set(segmentation_id):
msg = _("segmentation_id specified "
"for %s network") % network_type
raise n_exc.InvalidInput(error_message=msg)
attrs[provider.SEGMENTATION_ID] = None
physical_network = attrs.get(provider.PHYSICAL_NETWORK)
if attributes.is_attr_set(physical_network):
msg = _("physical_network specified "
"for %s network") % network_type
raise n_exc.InvalidInput(error_message=msg)
attrs[provider.PHYSICAL_NETWORK] = None
def extend_network_dict(self, network, binding):
network[provider.PHYSICAL_NETWORK] = None
network[provider.SEGMENTATION_ID] = None
class FlatNetworkProvider(BaseNetworkProvider):
def create_network(self, session, attrs):
network_type = attrs.get(provider.NETWORK_TYPE)
segmentation_id = attrs.get(provider.SEGMENTATION_ID)
if attributes.is_attr_set(segmentation_id):
msg = _("segmentation_id specified "
"for %s network") % network_type
raise n_exc.InvalidInput(error_message=msg)
segmentation_id = constants.FLAT_VLAN_ID
attrs[provider.SEGMENTATION_ID] = segmentation_id
physical_network = attrs.get(provider.PHYSICAL_NETWORK)
if not attributes.is_attr_set(physical_network):
physical_network = self._db.reserve_flat_net(session)
attrs[provider.PHYSICAL_NETWORK] = physical_network
else:
self._db.reserve_specific_flat_net(session, physical_network)
def delete_network(self, session, binding):
self._db.release_vlan(session, binding.physical_network,
constants.FLAT_VLAN_ID)
def extend_network_dict(self, network, binding):
network[provider.PHYSICAL_NETWORK] = binding.physical_network
class VlanNetworkProvider(BaseNetworkProvider):
def create_network(self, session, attrs):
segmentation_id = attrs.get(provider.SEGMENTATION_ID)
if attributes.is_attr_set(segmentation_id):
physical_network = attrs.get(provider.PHYSICAL_NETWORK)
if not attributes.is_attr_set(physical_network):
msg = _("physical_network not provided")
raise n_exc.InvalidInput(error_message=msg)
self._db.reserve_specific_vlan(session, physical_network,
segmentation_id)
else:
(physical_network,
segmentation_id) = self._db.reserve_vlan(session)
attrs[provider.SEGMENTATION_ID] = segmentation_id
attrs[provider.PHYSICAL_NETWORK] = physical_network
def delete_network(self, session, binding):
self._db.release_vlan(
session, binding.physical_network,
binding.segmentation_id)
def extend_network_dict(self, network, binding):
network[provider.PHYSICAL_NETWORK] = binding.physical_network
network[provider.SEGMENTATION_ID] = binding.segmentation_id
class HyperVNeutronPlugin(agents_db.AgentDbMixin,
db_base_plugin_v2.NeutronDbPluginV2,
external_net_db.External_net_db_mixin,
l3_gwmode_db.L3_NAT_db_mixin,
portbindings_base.PortBindingBaseMixin):
    # This attribute specifies whether or not the plugin supports
    # bulk operations. Name mangling is used in order to ensure it
    # is qualified by class.
__native_bulk_support = True
supported_extension_aliases = ["provider", "external-net", "router",
"agent", "ext-gw-mode", "binding", "quotas"]
def __init__(self, configfile=None):
self._db = hyperv_db.HyperVPluginDB()
self._db.initialize()
self.base_binding_dict = {
portbindings.VIF_TYPE: portbindings.VIF_TYPE_HYPERV}
portbindings_base.register_port_dict_function()
self._set_tenant_network_type()
self._parse_network_vlan_ranges()
self._create_network_providers_map()
self._db.sync_vlan_allocations(self._network_vlan_ranges)
self._setup_rpc()
def _set_tenant_network_type(self):
tenant_network_type = cfg.CONF.HYPERV.tenant_network_type
if tenant_network_type not in [svc_constants.TYPE_LOCAL,
svc_constants.TYPE_FLAT,
svc_constants.TYPE_VLAN,
svc_constants.TYPE_NONE]:
msg = _(
"Invalid tenant_network_type: %s. "
"Agent terminated!") % tenant_network_type
raise n_exc.InvalidInput(error_message=msg)
self._tenant_network_type = tenant_network_type
def _setup_rpc(self):
# RPC support
self.service_topics = {svc_constants.CORE: topics.PLUGIN,
svc_constants.L3_ROUTER_NAT: topics.L3PLUGIN}
self.conn = rpc.create_connection(new=True)
self.notifier = agent_notifier_api.AgentNotifierApi(
topics.AGENT)
self.callbacks = rpc_callbacks.HyperVRpcCallbacks(self.notifier)
self.dispatcher = self.callbacks.create_rpc_dispatcher()
for svc_topic in self.service_topics.values():
self.conn.create_consumer(svc_topic, self.dispatcher, fanout=False)
# Consume from all consumers in a thread
self.conn.consume_in_thread()
def _parse_network_vlan_ranges(self):
self._network_vlan_ranges = plugin_utils.parse_network_vlan_ranges(
cfg.CONF.HYPERV.network_vlan_ranges)
LOG.info(_("Network VLAN ranges: %s"), self._network_vlan_ranges)
def _check_vlan_id_in_range(self, physical_network, vlan_id):
for r in self._network_vlan_ranges[physical_network]:
if vlan_id >= r[0] and vlan_id <= r[1]:
return True
return False
def _create_network_providers_map(self):
self._network_providers_map = {
svc_constants.TYPE_LOCAL: LocalNetworkProvider(),
svc_constants.TYPE_FLAT: FlatNetworkProvider(),
svc_constants.TYPE_VLAN: VlanNetworkProvider()
}
def _process_provider_create(self, context, session, attrs):
network_type = attrs.get(provider.NETWORK_TYPE)
network_type_set = attributes.is_attr_set(network_type)
if not network_type_set:
if self._tenant_network_type == svc_constants.TYPE_NONE:
raise n_exc.TenantNetworksDisabled()
network_type = self._tenant_network_type
attrs[provider.NETWORK_TYPE] = network_type
if network_type not in self._network_providers_map:
msg = _("Network type %s not supported") % network_type
raise n_exc.InvalidInput(error_message=msg)
p = self._network_providers_map[network_type]
# Provider specific network creation
p.create_network(session, attrs)
def create_network(self, context, network):
session = context.session
with session.begin(subtransactions=True):
network_attrs = network['network']
self._process_provider_create(context, session, network_attrs)
net = super(HyperVNeutronPlugin, self).create_network(
context, network)
network_type = network_attrs[provider.NETWORK_TYPE]
physical_network = network_attrs[provider.PHYSICAL_NETWORK]
segmentation_id = network_attrs[provider.SEGMENTATION_ID]
self._db.add_network_binding(
session, net['id'], network_type,
physical_network, segmentation_id)
self._process_l3_create(context, net, network['network'])
self._extend_network_dict_provider(context, net)
LOG.debug(_("Created network: %s"), net['id'])
return net
def _extend_network_dict_provider(self, context, network):
binding = self._db.get_network_binding(
context.session, network['id'])
network[provider.NETWORK_TYPE] = binding.network_type
p = self._network_providers_map[binding.network_type]
p.extend_network_dict(network, binding)
def update_network(self, context, id, network):
provider._raise_if_updates_provider_attributes(network['network'])
session = context.session
with session.begin(subtransactions=True):
net = super(HyperVNeutronPlugin, self).update_network(context, id,
network)
self._process_l3_update(context, net, network['network'])
self._extend_network_dict_provider(context, net)
return net
def delete_network(self, context, id):
session = context.session
with session.begin(subtransactions=True):
binding = self._db.get_network_binding(session, id)
super(HyperVNeutronPlugin, self).delete_network(context, id)
p = self._network_providers_map[binding.network_type]
p.delete_network(session, binding)
# the network_binding record is deleted via cascade from
# the network record, so explicit removal is not necessary
self.notifier.network_delete(context, id)
def get_network(self, context, id, fields=None):
net = super(HyperVNeutronPlugin, self).get_network(context, id, None)
self._extend_network_dict_provider(context, net)
return self._fields(net, fields)
def get_networks(self, context, filters=None, fields=None):
nets = super(HyperVNeutronPlugin, self).get_networks(
context, filters, None)
for net in nets:
self._extend_network_dict_provider(context, net)
return [self._fields(net, fields) for net in nets]
def create_port(self, context, port):
port_data = port['port']
port = super(HyperVNeutronPlugin, self).create_port(context, port)
self._process_portbindings_create_and_update(context,
port_data,
port)
return port
def update_port(self, context, id, port):
original_port = super(HyperVNeutronPlugin, self).get_port(
context, id)
port_data = port['port']
port = super(HyperVNeutronPlugin, self).update_port(context, id, port)
self._process_portbindings_create_and_update(context,
port_data,
port)
if original_port['admin_state_up'] != port['admin_state_up']:
binding = self._db.get_network_binding(
None, port['network_id'])
self.notifier.port_update(context, port,
binding.network_type,
binding.segmentation_id,
binding.physical_network)
return port
def delete_port(self, context, id, l3_port_check=True):
# if needed, check to see if this is a port owned by
# and l3-router. If so, we should prevent deletion.
if l3_port_check:
self.prevent_l3_port_deletion(context, id)
self.disassociate_floatingips(context, id)
super(HyperVNeutronPlugin, self).delete_port(context, id)
self.notifier.port_delete(context, id)
| apache-2.0 |
coronary/RandomEpisode | depends/Lib/site-packages/pip/_vendor/retrying.py | 934 | 9972 | ## Copyright 2013-2014 Ray Holder
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
import random
from pip._vendor import six
import sys
import time
import traceback
# sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint...
MAX_WAIT = 1073741823
def retry(*dargs, **dkw):
"""
Decorator function that instantiates the Retrying object
@param *dargs: positional arguments passed to Retrying object
@param **dkw: keyword arguments passed to the Retrying object
"""
# support both @retry and @retry() as valid syntax
if len(dargs) == 1 and callable(dargs[0]):
def wrap_simple(f):
@six.wraps(f)
def wrapped_f(*args, **kw):
return Retrying().call(f, *args, **kw)
return wrapped_f
return wrap_simple(dargs[0])
else:
def wrap(f):
@six.wraps(f)
def wrapped_f(*args, **kw):
return Retrying(*dargs, **dkw).call(f, *args, **kw)
return wrapped_f
return wrap
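# Illustrative usage sketch (not part of the original module): the decorator supports both
# the bare @retry form and the parameterized form shown here; the function name and its
# behaviour are assumed placeholders.
@retry(stop_max_attempt_number=3, wait_fixed=100)
def _example_flaky_operation():
    # Retried up to 3 times, sleeping 100 ms between attempts, if it raised an exception.
    return "ok"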
class Retrying(object):
def __init__(self,
stop=None, wait=None,
stop_max_attempt_number=None,
stop_max_delay=None,
wait_fixed=None,
wait_random_min=None, wait_random_max=None,
wait_incrementing_start=None, wait_incrementing_increment=None,
wait_exponential_multiplier=None, wait_exponential_max=None,
retry_on_exception=None,
retry_on_result=None,
wrap_exception=False,
stop_func=None,
wait_func=None,
wait_jitter_max=None):
self._stop_max_attempt_number = 5 if stop_max_attempt_number is None else stop_max_attempt_number
self._stop_max_delay = 100 if stop_max_delay is None else stop_max_delay
self._wait_fixed = 1000 if wait_fixed is None else wait_fixed
self._wait_random_min = 0 if wait_random_min is None else wait_random_min
self._wait_random_max = 1000 if wait_random_max is None else wait_random_max
self._wait_incrementing_start = 0 if wait_incrementing_start is None else wait_incrementing_start
self._wait_incrementing_increment = 100 if wait_incrementing_increment is None else wait_incrementing_increment
self._wait_exponential_multiplier = 1 if wait_exponential_multiplier is None else wait_exponential_multiplier
self._wait_exponential_max = MAX_WAIT if wait_exponential_max is None else wait_exponential_max
self._wait_jitter_max = 0 if wait_jitter_max is None else wait_jitter_max
# TODO add chaining of stop behaviors
# stop behavior
stop_funcs = []
if stop_max_attempt_number is not None:
stop_funcs.append(self.stop_after_attempt)
if stop_max_delay is not None:
stop_funcs.append(self.stop_after_delay)
if stop_func is not None:
self.stop = stop_func
elif stop is None:
self.stop = lambda attempts, delay: any(f(attempts, delay) for f in stop_funcs)
else:
self.stop = getattr(self, stop)
# TODO add chaining of wait behaviors
# wait behavior
wait_funcs = [lambda *args, **kwargs: 0]
if wait_fixed is not None:
wait_funcs.append(self.fixed_sleep)
if wait_random_min is not None or wait_random_max is not None:
wait_funcs.append(self.random_sleep)
if wait_incrementing_start is not None or wait_incrementing_increment is not None:
wait_funcs.append(self.incrementing_sleep)
if wait_exponential_multiplier is not None or wait_exponential_max is not None:
wait_funcs.append(self.exponential_sleep)
if wait_func is not None:
self.wait = wait_func
elif wait is None:
self.wait = lambda attempts, delay: max(f(attempts, delay) for f in wait_funcs)
else:
self.wait = getattr(self, wait)
# retry on exception filter
if retry_on_exception is None:
self._retry_on_exception = self.always_reject
else:
self._retry_on_exception = retry_on_exception
# TODO simplify retrying by Exception types
# retry on result filter
if retry_on_result is None:
self._retry_on_result = self.never_reject
else:
self._retry_on_result = retry_on_result
self._wrap_exception = wrap_exception
def stop_after_attempt(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Stop after the previous attempt >= stop_max_attempt_number."""
return previous_attempt_number >= self._stop_max_attempt_number
def stop_after_delay(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Stop after the time from the first attempt >= stop_max_delay."""
return delay_since_first_attempt_ms >= self._stop_max_delay
def no_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Don't sleep at all before retrying."""
return 0
def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Sleep a fixed amount of time between each retry."""
return self._wait_fixed
def random_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Sleep a random amount of time between wait_random_min and wait_random_max"""
return random.randint(self._wait_random_min, self._wait_random_max)
def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
"""
Sleep an incremental amount of time after each attempt, starting at
wait_incrementing_start and incrementing by wait_incrementing_increment
"""
result = self._wait_incrementing_start + (self._wait_incrementing_increment * (previous_attempt_number - 1))
if result < 0:
result = 0
return result
def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
exp = 2 ** previous_attempt_number
result = self._wait_exponential_multiplier * exp
if result > self._wait_exponential_max:
result = self._wait_exponential_max
if result < 0:
result = 0
return result
def never_reject(self, result):
return False
def always_reject(self, result):
return True
def should_reject(self, attempt):
reject = False
if attempt.has_exception:
reject |= self._retry_on_exception(attempt.value[1])
else:
reject |= self._retry_on_result(attempt.value)
return reject
def call(self, fn, *args, **kwargs):
start_time = int(round(time.time() * 1000))
attempt_number = 1
while True:
try:
attempt = Attempt(fn(*args, **kwargs), attempt_number, False)
except:
tb = sys.exc_info()
attempt = Attempt(tb, attempt_number, True)
if not self.should_reject(attempt):
return attempt.get(self._wrap_exception)
delay_since_first_attempt_ms = int(round(time.time() * 1000)) - start_time
if self.stop(attempt_number, delay_since_first_attempt_ms):
if not self._wrap_exception and attempt.has_exception:
# get() on an attempt with an exception should cause it to be raised, but raise just in case
raise attempt.get()
else:
raise RetryError(attempt)
else:
sleep = self.wait(attempt_number, delay_since_first_attempt_ms)
if self._wait_jitter_max:
jitter = random.random() * self._wait_jitter_max
sleep = sleep + max(0, jitter)
time.sleep(sleep / 1000.0)
attempt_number += 1
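# Illustrative sketch (not part of the original module): Retrying can also be used without
# the decorator by calling .call() directly; the lambda below stands in for any callable
# that may raise.
def _example_direct_call():
    return Retrying(stop_max_attempt_number=3, wait_fixed=50).call(lambda: 42)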
class Attempt(object):
"""
An Attempt encapsulates a call to a target function that may end as a
normal return value from the function or an Exception depending on what
occurred during the execution.
"""
def __init__(self, value, attempt_number, has_exception):
self.value = value
self.attempt_number = attempt_number
self.has_exception = has_exception
def get(self, wrap_exception=False):
"""
Return the return value of this Attempt instance or raise an Exception.
If wrap_exception is true, this Attempt is wrapped inside of a
RetryError before being raised.
"""
if self.has_exception:
if wrap_exception:
raise RetryError(self)
else:
six.reraise(self.value[0], self.value[1], self.value[2])
else:
return self.value
def __repr__(self):
if self.has_exception:
return "Attempts: {0}, Error:\n{1}".format(self.attempt_number, "".join(traceback.format_tb(self.value[2])))
else:
return "Attempts: {0}, Value: {1}".format(self.attempt_number, self.value)
class RetryError(Exception):
"""
A RetryError encapsulates the last Attempt instance right before giving up.
"""
def __init__(self, last_attempt):
self.last_attempt = last_attempt
def __str__(self):
return "RetryError[{0}]".format(self.last_attempt)
| mit |
jbking/demo-appengine-django-golang | myproject/django/core/servers/basehttp.py | 46 | 6634 | """
HTTP server that implements the Python WSGI protocol (PEP 333, rev 1.21).
Based on wsgiref.simple_server which is part of the standard library since 2.5.
This is a simple server for use in testing or debugging Django apps. It hasn't
been reviewed for security issues. DON'T USE IT FOR PRODUCTION USE!
"""
from __future__ import unicode_literals
import os
import socket
import sys
import traceback
try:
from urllib.parse import urljoin
except ImportError: # Python 2
from urlparse import urljoin
from django.utils.six.moves import socketserver
from wsgiref import simple_server
from wsgiref.util import FileWrapper # for backwards compatibility
import django
from django.core.exceptions import ImproperlyConfigured
from django.core.management.color import color_style
from django.core.wsgi import get_wsgi_application
from django.utils.importlib import import_module
__all__ = ['WSGIServer', 'WSGIRequestHandler']
def get_internal_wsgi_application():
"""
Loads and returns the WSGI application as configured by the user in
``settings.WSGI_APPLICATION``. With the default ``startproject`` layout,
this will be the ``application`` object in ``projectname/wsgi.py``.
This function, and the ``WSGI_APPLICATION`` setting itself, are only useful
for Django's internal servers (runserver, runfcgi); external WSGI servers
should just be configured to point to the correct application object
directly.
If settings.WSGI_APPLICATION is not set (is ``None``), we just return
whatever ``django.core.wsgi.get_wsgi_application`` returns.
"""
from django.conf import settings
app_path = getattr(settings, 'WSGI_APPLICATION')
if app_path is None:
return get_wsgi_application()
module_name, attr = app_path.rsplit('.', 1)
try:
mod = import_module(module_name)
except ImportError as e:
raise ImproperlyConfigured(
"WSGI application '%s' could not be loaded; "
"could not import module '%s': %s" % (app_path, module_name, e))
try:
app = getattr(mod, attr)
except AttributeError as e:
raise ImproperlyConfigured(
"WSGI application '%s' could not be loaded; "
"can't find '%s' in module '%s': %s"
% (app_path, attr, module_name, e))
return app
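# Illustrative sketch (not part of the original module): with the default startproject
# layout, settings.WSGI_APPLICATION is a dotted path such as "mysite.wsgi.application"
# ("mysite" is an assumed project name); the helper above splits it into a module path
# and an attribute name before importing.
def _example_wsgi_application_path():
    module_name, attr = "mysite.wsgi.application".rsplit('.', 1)
    return module_name, attr  # ("mysite.wsgi", "application")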
class ServerHandler(simple_server.ServerHandler, object):
error_status = str("500 INTERNAL SERVER ERROR")
def write(self, data):
"""'write()' callable as specified by PEP 3333"""
assert isinstance(data, bytes), "write() argument must be bytestring"
if not self.status:
raise AssertionError("write() before start_response()")
elif not self.headers_sent:
# Before the first output, send the stored headers
self.bytes_sent = len(data) # make sure we know content-length
self.send_headers()
else:
self.bytes_sent += len(data)
# XXX check Content-Length and truncate if too many bytes written?
# If data is too large, socket will choke, so write chunks no larger
# than 32MB at a time.
length = len(data)
if length > 33554432:
offset = 0
while offset < length:
chunk_size = min(33554432, length)
self._write(data[offset:offset+chunk_size])
self._flush()
offset += chunk_size
else:
self._write(data)
self._flush()
def error_output(self, environ, start_response):
super(ServerHandler, self).error_output(environ, start_response)
return ['\n'.join(traceback.format_exception(*sys.exc_info()))]
# Backport of http://hg.python.org/cpython/rev/d5af1b235dab. See #16241.
# This can be removed when support for Python <= 2.7.3 is deprecated.
def finish_response(self):
try:
if not self.result_is_file() or not self.sendfile():
for data in self.result:
self.write(data)
self.finish_content()
finally:
self.close()
class WSGIServer(simple_server.WSGIServer, object):
"""BaseHTTPServer that implements the Python WSGI protocol"""
def __init__(self, *args, **kwargs):
if kwargs.pop('ipv6', False):
self.address_family = socket.AF_INET6
super(WSGIServer, self).__init__(*args, **kwargs)
def server_bind(self):
"""Override server_bind to store the server name."""
super(WSGIServer, self).server_bind()
self.setup_environ()
class WSGIRequestHandler(simple_server.WSGIRequestHandler, object):
def __init__(self, *args, **kwargs):
from django.conf import settings
self.admin_static_prefix = urljoin(settings.STATIC_URL, 'admin/')
# We set self.path to avoid crashes in log_message() on unsupported
# requests (like "OPTIONS").
self.path = ''
self.style = color_style()
super(WSGIRequestHandler, self).__init__(*args, **kwargs)
def address_string(self):
# Short-circuit parent method to not call socket.getfqdn
return self.client_address[0]
def log_message(self, format, *args):
# Don't bother logging requests for admin images or the favicon.
if (self.path.startswith(self.admin_static_prefix)
or self.path == '/favicon.ico'):
return
msg = "[%s] %s\n" % (self.log_date_time_string(), format % args)
# Utilize terminal colors, if available
if args[1][0] == '2':
# Put 2XX first, since it should be the common case
msg = self.style.HTTP_SUCCESS(msg)
elif args[1][0] == '1':
msg = self.style.HTTP_INFO(msg)
elif args[1] == '304':
msg = self.style.HTTP_NOT_MODIFIED(msg)
elif args[1][0] == '3':
msg = self.style.HTTP_REDIRECT(msg)
elif args[1] == '404':
msg = self.style.HTTP_NOT_FOUND(msg)
elif args[1][0] == '4':
msg = self.style.HTTP_BAD_REQUEST(msg)
else:
# Any 5XX, or any other response
msg = self.style.HTTP_SERVER_ERROR(msg)
sys.stderr.write(msg)
def run(addr, port, wsgi_handler, ipv6=False, threading=False):
server_address = (addr, port)
if threading:
httpd_cls = type(str('WSGIServer'), (socketserver.ThreadingMixIn, WSGIServer), {})
else:
httpd_cls = WSGIServer
httpd = httpd_cls(server_address, WSGIRequestHandler, ipv6=ipv6)
httpd.set_app(wsgi_handler)
httpd.serve_forever()
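# Illustrative sketch (not part of the original module): this is roughly how the
# runserver management command drives run(); the address and port are assumed defaults.
def _example_run_dev_server():
    handler = get_internal_wsgi_application()
    run('127.0.0.1', 8000, handler, ipv6=False, threading=True)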
| mit |
KenKundert/abraxas | abraxas/secrets.py | 1 | 4131 | # Generate a Secret
import hashlib
import string
# Globals {{{1
DEFAULT_PASSPHRASE_LENGTH = 4
DEFAULT_PASSWORD_LENGTH = 12
DEFAULT_SEPARATOR = ' '
DEFAULT_ALPHABET = string.ascii_letters + string.digits
# Utilities {{{1
# Partition a string into chunks, each of chars_per_chunk characters, and return
# them one at a time until either the string is exhausted or num_chars have been
# returned. In this case, the string consists of hexadecimal characters (0-9,
# a-f), and so each chunk can be treated as a hexadecimal number or
# chars_per_chunk digits.
def _partition(hexstr, chars_per_chunk, num_chunks):
max_chars = len(hexstr)
for index in range(num_chunks):
start = index*chars_per_chunk
end = (index + 1)*chars_per_chunk
if end > max_chars:
break
yield hexstr[start:end]
# Pass phrase class {{{1
# Reads a dictionary and generates a pass phrase using those words.
# The dictionary is contained in a file either in the settings directory
# or the install directory.
class Passphrase():
def __init__(self, report):
self.report = report
# Check to see if we can access all the words in the dictionary. {{{2
def check_length(self, words, bits):
num_words = len(words)
max_words = 2**bits
if num_words > max_words:
self.report(' '.join([
"There are more words in the dictionary (%s)" % (num_words),
"than can be used (%s). The rest are ignored." % (max_words)]))
# Generate a passphrase {{{2
def generate(self, master_password, account, dictionary, salt=''):
key = salt
key += account.get_version()
key += account.get_id()
key += master_password
digest = hashlib.sha512((key).encode('utf-8')).hexdigest()
length = account.get_num_words(DEFAULT_PASSPHRASE_LENGTH)
separator = account.get_separator(DEFAULT_SEPARATOR)
words = dictionary.get_words()
# Generate pass phrase
phrase = []
self.check_length(words, 16)
for chunk in _partition(digest, 4, length):
# chunk is a string that contains 4 hexadecimal digits (the
# characters '0'-'9' and 'a'-'f'). It is converted into an integer
# between 0 and 65535 that is then used as an index to choose a
# word from the dictionary.
index = int(chunk, 16) % len(words)
phrase += [words[index]]
passphrase = separator.join(phrase)
return account.get_prefix() + passphrase + account.get_suffix()
# Password class {{{1
# Generates a password from an alphabet.
class Password():
def __init__(self, report):
self.report = report
# Check to see if we can access all the characters in our alphabet. {{{2
def check_length(self, alphabet, bits):
num_chars = len(alphabet)
max_chars = 2**bits
if num_chars > max_chars:
self.report(' '.join([
"There are more characters in the alphabet" % (self.path),
"(%s) than can be used (%s)." % (num_chars, max_chars),
"The rest are ignored."]))
# Generate a password {{{2
def generate(self, master_password, account, salt=''):
key = salt
key += account.get_version()
key += account.get_id()
key += master_password
digest = hashlib.sha512((key).encode('utf-8')).hexdigest()
length = account.get_num_chars(DEFAULT_PASSWORD_LENGTH)
# Generate password
password = ''
alphabet = account.get_alphabet(DEFAULT_ALPHABET)
self.check_length(alphabet, 8)
for chunk in _partition(digest, 2, length):
# chunk is a string that contains 2 hexadecimal digits (the
# characters '0'-'9' and 'a'-'f'). It is converted into an integer
# between 0 and 255 that is then used as an index to choose a
# word from the alphabet.
index = int(chunk, 16) % len(alphabet)
password += alphabet[index]
return (account.get_prefix() + password + account.get_suffix())
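# For illustration: password chunks are 2 hex digits (0..255) reduced modulo
# the alphabet length; with the 62-character default alphabet defined above:
#
#     >>> int('7f', 16) % len(DEFAULT_ALPHABET)
#     3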
| gpl-3.0 |
potpath/django-rest-framework | tests/test_response.py | 68 | 10811 | from __future__ import unicode_literals
from django.conf.urls import include, url
from django.test import TestCase
from django.utils import six
from rest_framework import generics, routers, serializers, status, viewsets
from rest_framework.renderers import (
BaseRenderer, BrowsableAPIRenderer, JSONRenderer
)
from rest_framework.response import Response
from rest_framework.settings import api_settings
from rest_framework.views import APIView
from tests.models import BasicModel
# Serializer used to test BasicModel
class BasicModelSerializer(serializers.ModelSerializer):
class Meta:
model = BasicModel
class MockPickleRenderer(BaseRenderer):
media_type = 'application/pickle'
class MockJsonRenderer(BaseRenderer):
media_type = 'application/json'
class MockTextMediaRenderer(BaseRenderer):
media_type = 'text/html'
DUMMYSTATUS = status.HTTP_200_OK
DUMMYCONTENT = 'dummycontent'
def RENDERER_A_SERIALIZER(x):
return ('Renderer A: %s' % x).encode('ascii')
def RENDERER_B_SERIALIZER(x):
return ('Renderer B: %s' % x).encode('ascii')
class RendererA(BaseRenderer):
media_type = 'mock/renderera'
format = "formata"
def render(self, data, media_type=None, renderer_context=None):
return RENDERER_A_SERIALIZER(data)
class RendererB(BaseRenderer):
media_type = 'mock/rendererb'
format = "formatb"
def render(self, data, media_type=None, renderer_context=None):
return RENDERER_B_SERIALIZER(data)
class RendererC(RendererB):
media_type = 'mock/rendererc'
format = 'formatc'
charset = "rendererc"
class MockView(APIView):
renderer_classes = (RendererA, RendererB, RendererC)
def get(self, request, **kwargs):
return Response(DUMMYCONTENT, status=DUMMYSTATUS)
class MockViewSettingContentType(APIView):
renderer_classes = (RendererA, RendererB, RendererC)
def get(self, request, **kwargs):
return Response(DUMMYCONTENT, status=DUMMYSTATUS, content_type='setbyview')
class HTMLView(APIView):
renderer_classes = (BrowsableAPIRenderer, )
def get(self, request, **kwargs):
return Response('text')
class HTMLView1(APIView):
renderer_classes = (BrowsableAPIRenderer, JSONRenderer)
def get(self, request, **kwargs):
return Response('text')
class HTMLNewModelViewSet(viewsets.ModelViewSet):
serializer_class = BasicModelSerializer
queryset = BasicModel.objects.all()
class HTMLNewModelView(generics.ListCreateAPIView):
renderer_classes = (BrowsableAPIRenderer,)
permission_classes = []
serializer_class = BasicModelSerializer
queryset = BasicModel.objects.all()
new_model_viewset_router = routers.DefaultRouter()
new_model_viewset_router.register(r'', HTMLNewModelViewSet)
urlpatterns = [
url(r'^setbyview$', MockViewSettingContentType.as_view(renderer_classes=[RendererA, RendererB, RendererC])),
url(r'^.*\.(?P<format>.+)$', MockView.as_view(renderer_classes=[RendererA, RendererB, RendererC])),
url(r'^$', MockView.as_view(renderer_classes=[RendererA, RendererB, RendererC])),
url(r'^html$', HTMLView.as_view()),
url(r'^html1$', HTMLView1.as_view()),
url(r'^html_new_model$', HTMLNewModelView.as_view()),
url(r'^html_new_model_viewset', include(new_model_viewset_router.urls)),
url(r'^restframework', include('rest_framework.urls', namespace='rest_framework'))
]
# TODO: Clean tests below - remove duplicates with the ones above, better unit testing, ...
class RendererIntegrationTests(TestCase):
"""
    End-to-end testing of renderers using a ResponseMixin on a generic view.
"""
urls = 'tests.test_response'
def test_default_renderer_serializes_content(self):
"""If the Accept header is not set the default renderer should serialize the response."""
resp = self.client.get('/')
self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_A_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
def test_head_method_serializes_no_content(self):
"""No response must be included in HEAD requests."""
resp = self.client.head('/')
self.assertEqual(resp.status_code, DUMMYSTATUS)
self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8')
self.assertEqual(resp.content, six.b(''))
def test_default_renderer_serializes_content_on_accept_any(self):
"""If the Accept header is set to */* the default renderer should serialize the response."""
resp = self.client.get('/', HTTP_ACCEPT='*/*')
self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_A_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
def test_specified_renderer_serializes_content_default_case(self):
"""If the Accept header is set the specified renderer should serialize the response.
(In this case we check that works for the default renderer)"""
resp = self.client.get('/', HTTP_ACCEPT=RendererA.media_type)
self.assertEqual(resp['Content-Type'], RendererA.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_A_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
def test_specified_renderer_serializes_content_non_default_case(self):
"""If the Accept header is set the specified renderer should serialize the response.
(In this case we check that works for a non-default renderer)"""
resp = self.client.get('/', HTTP_ACCEPT=RendererB.media_type)
self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
def test_specified_renderer_serializes_content_on_accept_query(self):
"""The '_accept' query string should behave in the same way as the Accept header."""
param = '?%s=%s' % (
api_settings.URL_ACCEPT_OVERRIDE,
RendererB.media_type
)
resp = self.client.get('/' + param)
self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
def test_specified_renderer_serializes_content_on_format_query(self):
"""If a 'format' query is specified, the renderer with the matching
format attribute should serialize the response."""
resp = self.client.get('/?format=%s' % RendererB.format)
self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
def test_specified_renderer_serializes_content_on_format_kwargs(self):
"""If a 'format' keyword arg is specified, the renderer with the matching
format attribute should serialize the response."""
resp = self.client.get('/something.formatb')
self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
def test_specified_renderer_is_used_on_format_query_with_matching_accept(self):
"""If both a 'format' query and a matching Accept header specified,
the renderer with the matching format attribute should serialize the response."""
resp = self.client.get('/?format=%s' % RendererB.format,
HTTP_ACCEPT=RendererB.media_type)
self.assertEqual(resp['Content-Type'], RendererB.media_type + '; charset=utf-8')
self.assertEqual(resp.content, RENDERER_B_SERIALIZER(DUMMYCONTENT))
self.assertEqual(resp.status_code, DUMMYSTATUS)
class Issue122Tests(TestCase):
"""
Tests that covers #122.
"""
urls = 'tests.test_response'
def test_only_html_renderer(self):
"""
Test if no infinite recursion occurs.
"""
self.client.get('/html')
def test_html_renderer_is_first(self):
"""
Test if no infinite recursion occurs.
"""
self.client.get('/html1')
class Issue467Tests(TestCase):
"""
Tests for #467
"""
urls = 'tests.test_response'
def test_form_has_label_and_help_text(self):
resp = self.client.get('/html_new_model')
self.assertEqual(resp['Content-Type'], 'text/html; charset=utf-8')
# self.assertContains(resp, 'Text comes here')
# self.assertContains(resp, 'Text description.')
class Issue807Tests(TestCase):
"""
Covers #807
"""
urls = 'tests.test_response'
def test_does_not_append_charset_by_default(self):
"""
Renderers don't include a charset unless set explicitly.
"""
headers = {"HTTP_ACCEPT": RendererA.media_type}
resp = self.client.get('/', **headers)
expected = "{0}; charset={1}".format(RendererA.media_type, 'utf-8')
self.assertEqual(expected, resp['Content-Type'])
def test_if_there_is_charset_specified_on_renderer_it_gets_appended(self):
"""
If renderer class has charset attribute declared, it gets appended
to Response's Content-Type
"""
headers = {"HTTP_ACCEPT": RendererC.media_type}
resp = self.client.get('/', **headers)
expected = "{0}; charset={1}".format(RendererC.media_type, RendererC.charset)
self.assertEqual(expected, resp['Content-Type'])
def test_content_type_set_explicitly_on_response(self):
"""
The content type may be set explicitly on the response.
"""
headers = {"HTTP_ACCEPT": RendererC.media_type}
resp = self.client.get('/setbyview', **headers)
self.assertEqual('setbyview', resp['Content-Type'])
def test_viewset_label_help_text(self):
param = '?%s=%s' % (
api_settings.URL_ACCEPT_OVERRIDE,
'text/html'
)
resp = self.client.get('/html_new_model_viewset/' + param)
self.assertEqual(resp['Content-Type'], 'text/html; charset=utf-8')
# self.assertContains(resp, 'Text comes here')
# self.assertContains(resp, 'Text description.')
def test_form_has_label_and_help_text(self):
resp = self.client.get('/html_new_model')
self.assertEqual(resp['Content-Type'], 'text/html; charset=utf-8')
# self.assertContains(resp, 'Text comes here')
# self.assertContains(resp, 'Text description.')
| bsd-2-clause |
imk1/IMKTFBindingCode | makeGetMergedPeakSignalsScript.py | 1 | 2401 | import sys
import argparse
from itertools import izip
def parseArgument():
# Parse the input
parser=argparse.ArgumentParser(description=\
"Get the positions with the top scores and their scores")
parser.add_argument("--bedFileNameList", required=True,\
help='File with list of bed files')
parser.add_argument("--signalFileNameList", required=True,\
help='File with list of signal files, ordered in the same way as bedFileNameList')
parser.add_argument("--numSignalsMerged", type=int, default=2, required=False,\
help='Number of signals per merged peak file')
parser.add_argument("--outputFileNamePrefixList", required=True,\
help='File with list of output name prefixes, where there are numSignalsMerged per bed/signal file, ordered in the same way as bedFileNameList')
parser.add_argument("--scriptFileName", required=True,\
help='Output file name for script')
options = parser.parse_args()
return options
def makeGetMergedPeakSignalsScript(options):
# Make a script that will get signals for the merged peaks and record them according to bin indexes
bedFileNameListFile = open(options.bedFileNameList)
signalFileNameListFile = open(options.signalFileNameList)
outputFileNamePrefixListFile = open(options.outputFileNamePrefixList)
	scriptFile = open(options.scriptFileName, 'w')
	for bedFileNameStr, signalFileNameStr in izip(bedFileNameListFile, signalFileNameListFile):
# Iterate through the merged peak files and create lines in the script for each
bedFileName = bedFileNameStr.strip()
signalFileName = signalFileNameStr.strip()
for i in range(options.numSignalsMerged):
# Iterate through the merged signals and create a separate file for each
outputFileNamePrefix = outputFileNamePrefixListFile.readline().strip()
			for j in range(1, 23):  # autosomes chr1-chr22
# Iterate through the chromosomes and make a separate file for each
chrom = "chr" + str(j)
outputFileName = "{0}.{1}.{2}\n".format(outputFileNamePrefix, chrom, "txt")
col = str(i + 4)
scriptString = "{0} {1} {2} {3} {4} {5} {6}\n".format("paste", bedFileName, signalFileName, "| grep -P'", + chrom + "\\t' | sort -u -k1,1 -k2,2n -k3,3n | cut -f1," + col, "| awk '{print NR, \"\\t\", $0}' | awk 'BEGIN{OFS=\"\\t\"} {print $2,$1,$1+1,$3}' >", outputFileName)
scriptFile.write(scriptString)
if __name__ == "__main__":
options = parseArgument()
makeGetMergedPeakSignalsScript(options)
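# Illustrative invocation (the list-file names below are hypothetical and only
# show the expected arguments):
# python makeGetMergedPeakSignalsScript.py \
#	--bedFileNameList mergedPeakBedFiles.txt \
#	--signalFileNameList mergedPeakSignalFiles.txt \
#	--numSignalsMerged 2 \
#	--outputFileNamePrefixList outputPrefixes.txt \
#	--scriptFileName getMergedPeakSignals.sh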
| mit |
dutradda/alquimia | alquimia/models_attrs_reflect.py | 1 | 4950 | # Copyright 2015 Diogo Dutra
# This file is part of alquimia.
# alquimia is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
from sqlalchemy.orm import relationship
from alquimia.utils import log
class OneToOneManyToManyError(Exception):
def __init__(self, model_name, rel_name, logger=logging):
message = '%s.%s is a one-to-one relationship but ' \
'was mapped as many-to-many!' % (model_name, rel_name)
log(logger, 'critical', message)
Exception.__init__(self, message)
class ModelsAtrrsReflect(dict):
def __init__(self, metadata, logger=logging, *args):
self._logger = logger
self._metadata = metadata
self._rels = {}
self._build(*args)
def _build_rel_instance(self, rel_name, table_name, update_kargs={}):
kwargs = {'cascade': 'all'}
kwargs.update(update_kargs)
self[table_name][rel_name] = relationship(rel_name, **kwargs)
def _add_rel(self, rel_type, rel_name, table_name, args={}):
self[table_name][rel_type].append(rel_name)
self[table_name]['relationships'].append(rel_name)
self._build_rel_instance(rel_name, table_name, args)
def _build_many_to_many_rel(self, rel_name, table_name, mtm_table):
if rel_name == table_name:
raise OneToOneManyToManyError(table_name, rel_name)
args = {'secondary': mtm_table}
self._add_rel('mtm', rel_name, table_name, args)
self._add_rel('mtm', table_name, rel_name, args)
def _build_many_to_one_rel(self, rel_name, table_name):
self._add_rel('mto', rel_name, table_name)
args = {'cascade': 'all,delete-orphan'}
self._add_rel('otm', table_name, rel_name, args)
def _build_one_to_one_rel(self, rel_name, table_name, id_column=None):
args = {'uselist': False, 'single_parent': True,
'cascade': 'all,delete-orphan'}
if id_column is not None:
args['remote_side'] = [id_column]
self._add_rel('oto', rel_name, table_name, args)
if not rel_name == table_name:
self._add_rel('oto', table_name, rel_name, args)
def _build_relationships(self, table):
for fk in table.foreign_keys:
rel_name = fk.column.table.name
id_column = table.c['id'] if rel_name == table.name else None
if id_column is not None or fk.parent.unique:
self._build_one_to_one_rel(rel_name, table.name, id_column)
else:
self._build_many_to_one_rel(rel_name, table.name)
mtm_tables = list(self._mtm_tables.values())
for mtm_table in mtm_tables:
mtm_rels = list(mtm_table.columns.keys())
table_rel = table.name+'_id'
if table_rel in mtm_rels:
mtm_rels.remove(table_rel)
rel_name = mtm_rels[0][:-3]
self._build_many_to_many_rel(rel_name, table.name,
mtm_table.name)
self._mtm_tables.pop(mtm_table.name)
def _keep_mtm_tables(self):
self._mtm_tables = {}
for table in self._metadata.tables.values():
fks = table.foreign_keys
if len(fks) == len(table.c) == 2:
is_pks = True
for fk in fks:
if not fk.column.primary_key:
is_pks = False
break
if not is_pks:
continue
self._mtm_tables[table.name] = table
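    # For illustration (table and column names are hypothetical): a pure
    # association table declared roughly as
    #
    #     Table('article_tag', metadata,
    #           Column('article_id', Integer, ForeignKey('article.id'), primary_key=True),
    #           Column('tag_id', Integer, ForeignKey('tag.id'), primary_key=True))
    #
    # has exactly two columns, both foreign keys and both primary keys, so it is
    # kept in self._mtm_tables and later becomes a many-to-many relationship
    # between "article" and "tag" instead of a mapped model of its own.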
def _init_attrs(self, table_name):
self[table_name] = {
'mtm': [],
'mto': [],
'oto': [],
'otm': [],
'relationships': [],
'columns': [],
'session': None
}
def _build(self, *args):
self._metadata.reflect()
self._keep_mtm_tables()
attrs = {}
tables = [table for table in self._metadata.tables.values() \
if table.name not in self._mtm_tables]
for table in tables:
self._init_attrs(str(table.name))
for table in tables:
if table.name not in self._mtm_tables:
self._build_relationships(table)
self[table.name]['__table__'] = table
| lgpl-3.0 |
Peddle/hue | desktop/core/ext-py/pysaml2-2.4.0/example/sp-repoze/attributemaps/basic.py | 112 | 22565 |
MAP = {
"identifier": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic",
"fro": {
'urn:mace:dir:attribute-def:aRecord': 'aRecord',
'urn:mace:dir:attribute-def:aliasedEntryName': 'aliasedEntryName',
'urn:mace:dir:attribute-def:aliasedObjectName': 'aliasedObjectName',
'urn:mace:dir:attribute-def:associatedDomain': 'associatedDomain',
'urn:mace:dir:attribute-def:associatedName': 'associatedName',
'urn:mace:dir:attribute-def:audio': 'audio',
'urn:mace:dir:attribute-def:authorityRevocationList': 'authorityRevocationList',
'urn:mace:dir:attribute-def:buildingName': 'buildingName',
'urn:mace:dir:attribute-def:businessCategory': 'businessCategory',
'urn:mace:dir:attribute-def:c': 'c',
'urn:mace:dir:attribute-def:cACertificate': 'cACertificate',
'urn:mace:dir:attribute-def:cNAMERecord': 'cNAMERecord',
'urn:mace:dir:attribute-def:carLicense': 'carLicense',
'urn:mace:dir:attribute-def:certificateRevocationList': 'certificateRevocationList',
'urn:mace:dir:attribute-def:cn': 'cn',
'urn:mace:dir:attribute-def:co': 'co',
'urn:mace:dir:attribute-def:commonName': 'commonName',
'urn:mace:dir:attribute-def:countryName': 'countryName',
'urn:mace:dir:attribute-def:crossCertificatePair': 'crossCertificatePair',
'urn:mace:dir:attribute-def:dITRedirect': 'dITRedirect',
'urn:mace:dir:attribute-def:dSAQuality': 'dSAQuality',
'urn:mace:dir:attribute-def:dc': 'dc',
'urn:mace:dir:attribute-def:deltaRevocationList': 'deltaRevocationList',
'urn:mace:dir:attribute-def:departmentNumber': 'departmentNumber',
'urn:mace:dir:attribute-def:description': 'description',
'urn:mace:dir:attribute-def:destinationIndicator': 'destinationIndicator',
'urn:mace:dir:attribute-def:displayName': 'displayName',
'urn:mace:dir:attribute-def:distinguishedName': 'distinguishedName',
'urn:mace:dir:attribute-def:dmdName': 'dmdName',
'urn:mace:dir:attribute-def:dnQualifier': 'dnQualifier',
'urn:mace:dir:attribute-def:documentAuthor': 'documentAuthor',
'urn:mace:dir:attribute-def:documentIdentifier': 'documentIdentifier',
'urn:mace:dir:attribute-def:documentLocation': 'documentLocation',
'urn:mace:dir:attribute-def:documentPublisher': 'documentPublisher',
'urn:mace:dir:attribute-def:documentTitle': 'documentTitle',
'urn:mace:dir:attribute-def:documentVersion': 'documentVersion',
'urn:mace:dir:attribute-def:domainComponent': 'domainComponent',
'urn:mace:dir:attribute-def:drink': 'drink',
'urn:mace:dir:attribute-def:eduOrgHomePageURI': 'eduOrgHomePageURI',
'urn:mace:dir:attribute-def:eduOrgIdentityAuthNPolicyURI': 'eduOrgIdentityAuthNPolicyURI',
'urn:mace:dir:attribute-def:eduOrgLegalName': 'eduOrgLegalName',
'urn:mace:dir:attribute-def:eduOrgSuperiorURI': 'eduOrgSuperiorURI',
'urn:mace:dir:attribute-def:eduOrgWhitePagesURI': 'eduOrgWhitePagesURI',
'urn:mace:dir:attribute-def:eduPersonAffiliation': 'eduPersonAffiliation',
'urn:mace:dir:attribute-def:eduPersonEntitlement': 'eduPersonEntitlement',
'urn:mace:dir:attribute-def:eduPersonNickname': 'eduPersonNickname',
'urn:mace:dir:attribute-def:eduPersonOrgDN': 'eduPersonOrgDN',
'urn:mace:dir:attribute-def:eduPersonOrgUnitDN': 'eduPersonOrgUnitDN',
'urn:mace:dir:attribute-def:eduPersonPrimaryAffiliation': 'eduPersonPrimaryAffiliation',
'urn:mace:dir:attribute-def:eduPersonPrimaryOrgUnitDN': 'eduPersonPrimaryOrgUnitDN',
'urn:mace:dir:attribute-def:eduPersonPrincipalName': 'eduPersonPrincipalName',
'urn:mace:dir:attribute-def:eduPersonScopedAffiliation': 'eduPersonScopedAffiliation',
'urn:mace:dir:attribute-def:eduPersonTargetedID': 'eduPersonTargetedID',
'urn:mace:dir:attribute-def:email': 'email',
'urn:mace:dir:attribute-def:emailAddress': 'emailAddress',
'urn:mace:dir:attribute-def:employeeNumber': 'employeeNumber',
'urn:mace:dir:attribute-def:employeeType': 'employeeType',
'urn:mace:dir:attribute-def:enhancedSearchGuide': 'enhancedSearchGuide',
'urn:mace:dir:attribute-def:facsimileTelephoneNumber': 'facsimileTelephoneNumber',
'urn:mace:dir:attribute-def:favouriteDrink': 'favouriteDrink',
'urn:mace:dir:attribute-def:fax': 'fax',
'urn:mace:dir:attribute-def:federationFeideSchemaVersion': 'federationFeideSchemaVersion',
'urn:mace:dir:attribute-def:friendlyCountryName': 'friendlyCountryName',
'urn:mace:dir:attribute-def:generationQualifier': 'generationQualifier',
'urn:mace:dir:attribute-def:givenName': 'givenName',
'urn:mace:dir:attribute-def:gn': 'gn',
'urn:mace:dir:attribute-def:homePhone': 'homePhone',
'urn:mace:dir:attribute-def:homePostalAddress': 'homePostalAddress',
'urn:mace:dir:attribute-def:homeTelephoneNumber': 'homeTelephoneNumber',
'urn:mace:dir:attribute-def:host': 'host',
'urn:mace:dir:attribute-def:houseIdentifier': 'houseIdentifier',
'urn:mace:dir:attribute-def:info': 'info',
'urn:mace:dir:attribute-def:initials': 'initials',
'urn:mace:dir:attribute-def:internationaliSDNNumber': 'internationaliSDNNumber',
'urn:mace:dir:attribute-def:janetMailbox': 'janetMailbox',
'urn:mace:dir:attribute-def:jpegPhoto': 'jpegPhoto',
'urn:mace:dir:attribute-def:knowledgeInformation': 'knowledgeInformation',
'urn:mace:dir:attribute-def:l': 'l',
'urn:mace:dir:attribute-def:labeledURI': 'labeledURI',
'urn:mace:dir:attribute-def:localityName': 'localityName',
'urn:mace:dir:attribute-def:mDRecord': 'mDRecord',
'urn:mace:dir:attribute-def:mXRecord': 'mXRecord',
'urn:mace:dir:attribute-def:mail': 'mail',
'urn:mace:dir:attribute-def:mailPreferenceOption': 'mailPreferenceOption',
'urn:mace:dir:attribute-def:manager': 'manager',
'urn:mace:dir:attribute-def:member': 'member',
'urn:mace:dir:attribute-def:mobile': 'mobile',
'urn:mace:dir:attribute-def:mobileTelephoneNumber': 'mobileTelephoneNumber',
'urn:mace:dir:attribute-def:nSRecord': 'nSRecord',
'urn:mace:dir:attribute-def:name': 'name',
'urn:mace:dir:attribute-def:norEduOrgAcronym': 'norEduOrgAcronym',
'urn:mace:dir:attribute-def:norEduOrgNIN': 'norEduOrgNIN',
'urn:mace:dir:attribute-def:norEduOrgSchemaVersion': 'norEduOrgSchemaVersion',
'urn:mace:dir:attribute-def:norEduOrgUniqueIdentifier': 'norEduOrgUniqueIdentifier',
'urn:mace:dir:attribute-def:norEduOrgUniqueNumber': 'norEduOrgUniqueNumber',
'urn:mace:dir:attribute-def:norEduOrgUnitUniqueIdentifier': 'norEduOrgUnitUniqueIdentifier',
'urn:mace:dir:attribute-def:norEduOrgUnitUniqueNumber': 'norEduOrgUnitUniqueNumber',
'urn:mace:dir:attribute-def:norEduPersonBirthDate': 'norEduPersonBirthDate',
'urn:mace:dir:attribute-def:norEduPersonLIN': 'norEduPersonLIN',
'urn:mace:dir:attribute-def:norEduPersonNIN': 'norEduPersonNIN',
'urn:mace:dir:attribute-def:o': 'o',
'urn:mace:dir:attribute-def:objectClass': 'objectClass',
'urn:mace:dir:attribute-def:organizationName': 'organizationName',
'urn:mace:dir:attribute-def:organizationalStatus': 'organizationalStatus',
'urn:mace:dir:attribute-def:organizationalUnitName': 'organizationalUnitName',
'urn:mace:dir:attribute-def:otherMailbox': 'otherMailbox',
'urn:mace:dir:attribute-def:ou': 'ou',
'urn:mace:dir:attribute-def:owner': 'owner',
'urn:mace:dir:attribute-def:pager': 'pager',
'urn:mace:dir:attribute-def:pagerTelephoneNumber': 'pagerTelephoneNumber',
'urn:mace:dir:attribute-def:personalSignature': 'personalSignature',
'urn:mace:dir:attribute-def:personalTitle': 'personalTitle',
'urn:mace:dir:attribute-def:photo': 'photo',
'urn:mace:dir:attribute-def:physicalDeliveryOfficeName': 'physicalDeliveryOfficeName',
'urn:mace:dir:attribute-def:pkcs9email': 'pkcs9email',
'urn:mace:dir:attribute-def:postOfficeBox': 'postOfficeBox',
'urn:mace:dir:attribute-def:postalAddress': 'postalAddress',
'urn:mace:dir:attribute-def:postalCode': 'postalCode',
'urn:mace:dir:attribute-def:preferredDeliveryMethod': 'preferredDeliveryMethod',
'urn:mace:dir:attribute-def:preferredLanguage': 'preferredLanguage',
'urn:mace:dir:attribute-def:presentationAddress': 'presentationAddress',
'urn:mace:dir:attribute-def:protocolInformation': 'protocolInformation',
'urn:mace:dir:attribute-def:pseudonym': 'pseudonym',
'urn:mace:dir:attribute-def:registeredAddress': 'registeredAddress',
'urn:mace:dir:attribute-def:rfc822Mailbox': 'rfc822Mailbox',
'urn:mace:dir:attribute-def:roleOccupant': 'roleOccupant',
'urn:mace:dir:attribute-def:roomNumber': 'roomNumber',
'urn:mace:dir:attribute-def:sOARecord': 'sOARecord',
'urn:mace:dir:attribute-def:searchGuide': 'searchGuide',
'urn:mace:dir:attribute-def:secretary': 'secretary',
'urn:mace:dir:attribute-def:seeAlso': 'seeAlso',
'urn:mace:dir:attribute-def:serialNumber': 'serialNumber',
'urn:mace:dir:attribute-def:singleLevelQuality': 'singleLevelQuality',
'urn:mace:dir:attribute-def:sn': 'sn',
'urn:mace:dir:attribute-def:st': 'st',
'urn:mace:dir:attribute-def:stateOrProvinceName': 'stateOrProvinceName',
'urn:mace:dir:attribute-def:street': 'street',
'urn:mace:dir:attribute-def:streetAddress': 'streetAddress',
'urn:mace:dir:attribute-def:subtreeMaximumQuality': 'subtreeMaximumQuality',
'urn:mace:dir:attribute-def:subtreeMinimumQuality': 'subtreeMinimumQuality',
'urn:mace:dir:attribute-def:supportedAlgorithms': 'supportedAlgorithms',
'urn:mace:dir:attribute-def:supportedApplicationContext': 'supportedApplicationContext',
'urn:mace:dir:attribute-def:surname': 'surname',
'urn:mace:dir:attribute-def:telephoneNumber': 'telephoneNumber',
'urn:mace:dir:attribute-def:teletexTerminalIdentifier': 'teletexTerminalIdentifier',
'urn:mace:dir:attribute-def:telexNumber': 'telexNumber',
'urn:mace:dir:attribute-def:textEncodedORAddress': 'textEncodedORAddress',
'urn:mace:dir:attribute-def:title': 'title',
'urn:mace:dir:attribute-def:uid': 'uid',
'urn:mace:dir:attribute-def:uniqueIdentifier': 'uniqueIdentifier',
'urn:mace:dir:attribute-def:uniqueMember': 'uniqueMember',
'urn:mace:dir:attribute-def:userCertificate': 'userCertificate',
'urn:mace:dir:attribute-def:userClass': 'userClass',
'urn:mace:dir:attribute-def:userPKCS12': 'userPKCS12',
'urn:mace:dir:attribute-def:userPassword': 'userPassword',
'urn:mace:dir:attribute-def:userSMIMECertificate': 'userSMIMECertificate',
'urn:mace:dir:attribute-def:userid': 'userid',
'urn:mace:dir:attribute-def:x121Address': 'x121Address',
'urn:mace:dir:attribute-def:x500UniqueIdentifier': 'x500UniqueIdentifier',
},
"to": {
'aRecord': 'urn:mace:dir:attribute-def:aRecord',
'aliasedEntryName': 'urn:mace:dir:attribute-def:aliasedEntryName',
'aliasedObjectName': 'urn:mace:dir:attribute-def:aliasedObjectName',
'associatedDomain': 'urn:mace:dir:attribute-def:associatedDomain',
'associatedName': 'urn:mace:dir:attribute-def:associatedName',
'audio': 'urn:mace:dir:attribute-def:audio',
'authorityRevocationList': 'urn:mace:dir:attribute-def:authorityRevocationList',
'buildingName': 'urn:mace:dir:attribute-def:buildingName',
'businessCategory': 'urn:mace:dir:attribute-def:businessCategory',
'c': 'urn:mace:dir:attribute-def:c',
'cACertificate': 'urn:mace:dir:attribute-def:cACertificate',
'cNAMERecord': 'urn:mace:dir:attribute-def:cNAMERecord',
'carLicense': 'urn:mace:dir:attribute-def:carLicense',
'certificateRevocationList': 'urn:mace:dir:attribute-def:certificateRevocationList',
'cn': 'urn:mace:dir:attribute-def:cn',
'co': 'urn:mace:dir:attribute-def:co',
'commonName': 'urn:mace:dir:attribute-def:commonName',
'countryName': 'urn:mace:dir:attribute-def:countryName',
'crossCertificatePair': 'urn:mace:dir:attribute-def:crossCertificatePair',
'dITRedirect': 'urn:mace:dir:attribute-def:dITRedirect',
'dSAQuality': 'urn:mace:dir:attribute-def:dSAQuality',
'dc': 'urn:mace:dir:attribute-def:dc',
'deltaRevocationList': 'urn:mace:dir:attribute-def:deltaRevocationList',
'departmentNumber': 'urn:mace:dir:attribute-def:departmentNumber',
'description': 'urn:mace:dir:attribute-def:description',
'destinationIndicator': 'urn:mace:dir:attribute-def:destinationIndicator',
'displayName': 'urn:mace:dir:attribute-def:displayName',
'distinguishedName': 'urn:mace:dir:attribute-def:distinguishedName',
'dmdName': 'urn:mace:dir:attribute-def:dmdName',
'dnQualifier': 'urn:mace:dir:attribute-def:dnQualifier',
'documentAuthor': 'urn:mace:dir:attribute-def:documentAuthor',
'documentIdentifier': 'urn:mace:dir:attribute-def:documentIdentifier',
'documentLocation': 'urn:mace:dir:attribute-def:documentLocation',
'documentPublisher': 'urn:mace:dir:attribute-def:documentPublisher',
'documentTitle': 'urn:mace:dir:attribute-def:documentTitle',
'documentVersion': 'urn:mace:dir:attribute-def:documentVersion',
'domainComponent': 'urn:mace:dir:attribute-def:domainComponent',
'drink': 'urn:mace:dir:attribute-def:drink',
'eduOrgHomePageURI': 'urn:mace:dir:attribute-def:eduOrgHomePageURI',
'eduOrgIdentityAuthNPolicyURI': 'urn:mace:dir:attribute-def:eduOrgIdentityAuthNPolicyURI',
'eduOrgLegalName': 'urn:mace:dir:attribute-def:eduOrgLegalName',
'eduOrgSuperiorURI': 'urn:mace:dir:attribute-def:eduOrgSuperiorURI',
'eduOrgWhitePagesURI': 'urn:mace:dir:attribute-def:eduOrgWhitePagesURI',
'eduPersonAffiliation': 'urn:mace:dir:attribute-def:eduPersonAffiliation',
'eduPersonEntitlement': 'urn:mace:dir:attribute-def:eduPersonEntitlement',
'eduPersonNickname': 'urn:mace:dir:attribute-def:eduPersonNickname',
'eduPersonOrgDN': 'urn:mace:dir:attribute-def:eduPersonOrgDN',
'eduPersonOrgUnitDN': 'urn:mace:dir:attribute-def:eduPersonOrgUnitDN',
'eduPersonPrimaryAffiliation': 'urn:mace:dir:attribute-def:eduPersonPrimaryAffiliation',
'eduPersonPrimaryOrgUnitDN': 'urn:mace:dir:attribute-def:eduPersonPrimaryOrgUnitDN',
'eduPersonPrincipalName': 'urn:mace:dir:attribute-def:eduPersonPrincipalName',
'eduPersonScopedAffiliation': 'urn:mace:dir:attribute-def:eduPersonScopedAffiliation',
'eduPersonTargetedID': 'urn:mace:dir:attribute-def:eduPersonTargetedID',
'email': 'urn:mace:dir:attribute-def:email',
'emailAddress': 'urn:mace:dir:attribute-def:emailAddress',
'employeeNumber': 'urn:mace:dir:attribute-def:employeeNumber',
'employeeType': 'urn:mace:dir:attribute-def:employeeType',
'enhancedSearchGuide': 'urn:mace:dir:attribute-def:enhancedSearchGuide',
'facsimileTelephoneNumber': 'urn:mace:dir:attribute-def:facsimileTelephoneNumber',
'favouriteDrink': 'urn:mace:dir:attribute-def:favouriteDrink',
'fax': 'urn:mace:dir:attribute-def:fax',
'federationFeideSchemaVersion': 'urn:mace:dir:attribute-def:federationFeideSchemaVersion',
'friendlyCountryName': 'urn:mace:dir:attribute-def:friendlyCountryName',
'generationQualifier': 'urn:mace:dir:attribute-def:generationQualifier',
'givenName': 'urn:mace:dir:attribute-def:givenName',
'gn': 'urn:mace:dir:attribute-def:gn',
'homePhone': 'urn:mace:dir:attribute-def:homePhone',
'homePostalAddress': 'urn:mace:dir:attribute-def:homePostalAddress',
'homeTelephoneNumber': 'urn:mace:dir:attribute-def:homeTelephoneNumber',
'host': 'urn:mace:dir:attribute-def:host',
'houseIdentifier': 'urn:mace:dir:attribute-def:houseIdentifier',
'info': 'urn:mace:dir:attribute-def:info',
'initials': 'urn:mace:dir:attribute-def:initials',
'internationaliSDNNumber': 'urn:mace:dir:attribute-def:internationaliSDNNumber',
'janetMailbox': 'urn:mace:dir:attribute-def:janetMailbox',
'jpegPhoto': 'urn:mace:dir:attribute-def:jpegPhoto',
'knowledgeInformation': 'urn:mace:dir:attribute-def:knowledgeInformation',
'l': 'urn:mace:dir:attribute-def:l',
'labeledURI': 'urn:mace:dir:attribute-def:labeledURI',
'localityName': 'urn:mace:dir:attribute-def:localityName',
'mDRecord': 'urn:mace:dir:attribute-def:mDRecord',
'mXRecord': 'urn:mace:dir:attribute-def:mXRecord',
'mail': 'urn:mace:dir:attribute-def:mail',
'mailPreferenceOption': 'urn:mace:dir:attribute-def:mailPreferenceOption',
'manager': 'urn:mace:dir:attribute-def:manager',
'member': 'urn:mace:dir:attribute-def:member',
'mobile': 'urn:mace:dir:attribute-def:mobile',
'mobileTelephoneNumber': 'urn:mace:dir:attribute-def:mobileTelephoneNumber',
'nSRecord': 'urn:mace:dir:attribute-def:nSRecord',
'name': 'urn:mace:dir:attribute-def:name',
'norEduOrgAcronym': 'urn:mace:dir:attribute-def:norEduOrgAcronym',
'norEduOrgNIN': 'urn:mace:dir:attribute-def:norEduOrgNIN',
'norEduOrgSchemaVersion': 'urn:mace:dir:attribute-def:norEduOrgSchemaVersion',
'norEduOrgUniqueIdentifier': 'urn:mace:dir:attribute-def:norEduOrgUniqueIdentifier',
'norEduOrgUniqueNumber': 'urn:mace:dir:attribute-def:norEduOrgUniqueNumber',
'norEduOrgUnitUniqueIdentifier': 'urn:mace:dir:attribute-def:norEduOrgUnitUniqueIdentifier',
'norEduOrgUnitUniqueNumber': 'urn:mace:dir:attribute-def:norEduOrgUnitUniqueNumber',
'norEduPersonBirthDate': 'urn:mace:dir:attribute-def:norEduPersonBirthDate',
'norEduPersonLIN': 'urn:mace:dir:attribute-def:norEduPersonLIN',
'norEduPersonNIN': 'urn:mace:dir:attribute-def:norEduPersonNIN',
'o': 'urn:mace:dir:attribute-def:o',
'objectClass': 'urn:mace:dir:attribute-def:objectClass',
'organizationName': 'urn:mace:dir:attribute-def:organizationName',
'organizationalStatus': 'urn:mace:dir:attribute-def:organizationalStatus',
'organizationalUnitName': 'urn:mace:dir:attribute-def:organizationalUnitName',
'otherMailbox': 'urn:mace:dir:attribute-def:otherMailbox',
'ou': 'urn:mace:dir:attribute-def:ou',
'owner': 'urn:mace:dir:attribute-def:owner',
'pager': 'urn:mace:dir:attribute-def:pager',
'pagerTelephoneNumber': 'urn:mace:dir:attribute-def:pagerTelephoneNumber',
'personalSignature': 'urn:mace:dir:attribute-def:personalSignature',
'personalTitle': 'urn:mace:dir:attribute-def:personalTitle',
'photo': 'urn:mace:dir:attribute-def:photo',
'physicalDeliveryOfficeName': 'urn:mace:dir:attribute-def:physicalDeliveryOfficeName',
'pkcs9email': 'urn:mace:dir:attribute-def:pkcs9email',
'postOfficeBox': 'urn:mace:dir:attribute-def:postOfficeBox',
'postalAddress': 'urn:mace:dir:attribute-def:postalAddress',
'postalCode': 'urn:mace:dir:attribute-def:postalCode',
'preferredDeliveryMethod': 'urn:mace:dir:attribute-def:preferredDeliveryMethod',
'preferredLanguage': 'urn:mace:dir:attribute-def:preferredLanguage',
'presentationAddress': 'urn:mace:dir:attribute-def:presentationAddress',
'protocolInformation': 'urn:mace:dir:attribute-def:protocolInformation',
'pseudonym': 'urn:mace:dir:attribute-def:pseudonym',
'registeredAddress': 'urn:mace:dir:attribute-def:registeredAddress',
'rfc822Mailbox': 'urn:mace:dir:attribute-def:rfc822Mailbox',
'roleOccupant': 'urn:mace:dir:attribute-def:roleOccupant',
'roomNumber': 'urn:mace:dir:attribute-def:roomNumber',
'sOARecord': 'urn:mace:dir:attribute-def:sOARecord',
'searchGuide': 'urn:mace:dir:attribute-def:searchGuide',
'secretary': 'urn:mace:dir:attribute-def:secretary',
'seeAlso': 'urn:mace:dir:attribute-def:seeAlso',
'serialNumber': 'urn:mace:dir:attribute-def:serialNumber',
'singleLevelQuality': 'urn:mace:dir:attribute-def:singleLevelQuality',
'sn': 'urn:mace:dir:attribute-def:sn',
'st': 'urn:mace:dir:attribute-def:st',
'stateOrProvinceName': 'urn:mace:dir:attribute-def:stateOrProvinceName',
'street': 'urn:mace:dir:attribute-def:street',
'streetAddress': 'urn:mace:dir:attribute-def:streetAddress',
'subtreeMaximumQuality': 'urn:mace:dir:attribute-def:subtreeMaximumQuality',
'subtreeMinimumQuality': 'urn:mace:dir:attribute-def:subtreeMinimumQuality',
'supportedAlgorithms': 'urn:mace:dir:attribute-def:supportedAlgorithms',
'supportedApplicationContext': 'urn:mace:dir:attribute-def:supportedApplicationContext',
'surname': 'urn:mace:dir:attribute-def:surname',
'telephoneNumber': 'urn:mace:dir:attribute-def:telephoneNumber',
'teletexTerminalIdentifier': 'urn:mace:dir:attribute-def:teletexTerminalIdentifier',
'telexNumber': 'urn:mace:dir:attribute-def:telexNumber',
'textEncodedORAddress': 'urn:mace:dir:attribute-def:textEncodedORAddress',
'title': 'urn:mace:dir:attribute-def:title',
'uid': 'urn:mace:dir:attribute-def:uid',
'uniqueIdentifier': 'urn:mace:dir:attribute-def:uniqueIdentifier',
'uniqueMember': 'urn:mace:dir:attribute-def:uniqueMember',
'userCertificate': 'urn:mace:dir:attribute-def:userCertificate',
'userClass': 'urn:mace:dir:attribute-def:userClass',
'userPKCS12': 'urn:mace:dir:attribute-def:userPKCS12',
'userPassword': 'urn:mace:dir:attribute-def:userPassword',
'userSMIMECertificate': 'urn:mace:dir:attribute-def:userSMIMECertificate',
'userid': 'urn:mace:dir:attribute-def:userid',
'x121Address': 'urn:mace:dir:attribute-def:x121Address',
'x500UniqueIdentifier': 'urn:mace:dir:attribute-def:x500UniqueIdentifier',
}
} | apache-2.0 |
asfaltboy/GitSavvy | core/git_mixins/active_branch.py | 1 | 5082 | import re
import string
class ActiveBranchMixin():
def get_current_branch_name(self):
"""
        Return the name of the last checked-out branch.
"""
stdout = self.git("branch", "--no-color")
try:
correct_line = next(line for line in stdout.split("\n") if line.startswith("*"))
return correct_line[2:]
except StopIteration:
return None
def _get_branch_status_components(self):
"""
Return a tuple of:
0) boolean indicating whether repo is in detached state
1) boolean indicating whether this is initial commit
2) active branch name
3) remote branch name
4) boolean indicating whether branch is clean
5) # commits ahead of remote
6) # commits behind of remote
7) boolean indicating whether the remote branch is gone
"""
stdout = self.git("status", "-b", "--porcelain").strip()
first_line, *addl_lines = stdout.split("\n", 2)
# Any additional lines will mean files have changed or are untracked.
clean = len(addl_lines) == 0
if first_line.startswith("## HEAD (no branch)"):
return True, False, None, None, clean, None, None, False
if first_line.startswith("## Initial commit on "):
            return False, True, first_line[21:], None, clean, None, None, False
valid_punctuation = "".join(c for c in string.punctuation if c not in "~^:?*[\\")
branch_pattern = "[A-Za-z0-9" + re.escape(valid_punctuation) + "]+?"
short_status_pattern = "## (" + branch_pattern + ")(\.\.\.(" + branch_pattern + ")( \[((ahead (\d+))(, )?)?(behind (\d+))?(gone)?\])?)?$"
status_match = re.match(short_status_pattern, first_line)
if not status_match:
return False, False, None if clean else addl_lines[0], None, clean, None, None, False
branch, _, remote, _, _, _, ahead, _, _, behind, gone = status_match.groups()
return False, False, branch, remote, clean, ahead, behind, bool(gone)
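    # For illustration: a porcelain header such as
    # "## master...origin/master [ahead 1, behind 2]" produces
    #     (False, False, "master", "origin/master", clean, "1", "2", False)
    # while a detached HEAD ("## HEAD (no branch)") produces
    #     (True, False, None, None, clean, None, None, False).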
def get_branch_status(self, delim=None):
"""
Return a tuple of:
1) the name of the active branch
2) the status of the active local branch
compared to its remote counterpart.
        If no remote or tracking branch is defined, do not include remote data.
        If HEAD is detached, provide that status instead.
        If a delimiter is provided, join tuple components with it, and return
that value.
"""
detached, initial, branch, remote, clean, ahead, behind, gone = \
self._get_branch_status_components()
secondary = ""
if detached:
status = "HEAD is in a detached state."
elif initial:
status = "Initial commit on `{}`.".format(branch)
else:
tracking = " tracking `{}`".format(remote)
status = "On branch `{}`{}.".format(branch, tracking if remote else "")
if ahead and behind:
secondary = "You're ahead by {} and behind by {}.".format(ahead, behind)
elif ahead:
secondary = "You're ahead by {}.".format(ahead)
elif behind:
secondary = "You're behind by {}.".format(behind)
elif gone:
secondary = "The remote branch is gone."
if delim:
return delim.join((status, secondary)) if secondary else status
return status, secondary
def get_branch_status_short(self):
detached, initial, branch, remote, clean, ahead, behind, gone = \
self._get_branch_status_components()
dirty = "" if clean else "*"
if detached:
return "DETACHED" + dirty
output = branch + dirty
if ahead:
output += "+" + ahead
if behind:
output += "-" + behind
return output
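    # For illustration: a dirty "master" that is 2 ahead and 1 behind renders
    # as "master*+2-1", and a dirty detached HEAD renders as "DETACHED*".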
def get_commit_hash_for_head(self):
"""
Get the SHA1 commit hash for the commit at HEAD.
"""
return self.git("rev-parse", "HEAD").strip()
def get_latest_commit_msg_for_head(self):
"""
        Get the last commit message for the commit at HEAD.
"""
stdout = self.git(
"log",
"-n 1",
"--pretty=format:%h %s",
"--abbrev-commit",
throw_on_stderr=False
).strip()
return stdout or "No commits yet."
def get_upstream_for_active_branch(self):
"""
Return ref for remote tracking branch.
"""
return self.git("rev-parse", "--abbrev-ref", "--symbolic-full-name",
"@{u}", throw_on_stderr=False).strip()
def get_active_remote_branch(self):
"""
Return named tuple of the upstream for active branch.
"""
upstream = self.get_upstream_for_active_branch()
for branch in self.get_branches():
if branch.name_with_remote == upstream:
return branch
return None
| mit |
mrshelly/openerp71313 | openerp/addons/project_issue/project_issue.py | 1 | 29439 | #-*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.addons.base_status.base_stage import base_stage
from openerp.addons.crm import crm
from datetime import datetime
from openerp.osv import fields,osv
from openerp.tools.translate import _
import binascii
import time
from openerp import tools
from openerp.tools import html2plaintext
class project_issue_version(osv.osv):
_name = "project.issue.version"
_order = "name desc"
_columns = {
'name': fields.char('Version Number', size=32, required=True),
'active': fields.boolean('Active', required=False),
}
_defaults = {
'active': 1,
}
project_issue_version()
_ISSUE_STATE = [('draft', 'New'), ('open', 'In Progress'), ('cancel', 'Cancelled'), ('done', 'Done'), ('pending', 'Pending')]
class project_issue(base_stage, osv.osv):
_name = "project.issue"
_description = "Project Issue"
_order = "priority, create_date desc"
_inherit = ['mail.thread', 'ir.needaction_mixin']
_track = {
'state': {
'project_issue.mt_issue_new': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'new',
'project_issue.mt_issue_closed': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'done',
'project_issue.mt_issue_started': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'open',
},
'stage_id': {
'project_issue.mt_issue_stage': lambda self, cr, uid, obj, ctx=None: obj['state'] not in ['new', 'done', 'open'],
},
'kanban_state': {
'project_issue.mt_issue_blocked': lambda self, cr, uid, obj, ctx=None: obj['kanban_state'] == 'blocked',
},
}
def create(self, cr, uid, vals, context=None):
if context is None:
context = {}
if not context.get('default_project_id', False) and vals.get('project_id', False):
ctx = context.copy()
ctx['default_project_id'] = vals['project_id']
vals['stage_id'] = self._get_default_stage_id(cr, uid, context=ctx)
return super(project_issue, self).create(cr, uid, vals, context=context)
def _get_default_project_id(self, cr, uid, context=None):
""" Gives default project by checking if present in the context """
return self._resolve_project_id_from_context(cr, uid, context=context)
def _get_default_stage_id(self, cr, uid, context=None):
""" Gives default stage_id """
project_id = self._get_default_project_id(cr, uid, context=context)
return self.stage_find(cr, uid, [], project_id, [('state', '=', 'draft')], context=context)
def _resolve_project_id_from_context(self, cr, uid, context=None):
""" Returns ID of project based on the value of 'default_project_id'
context key, or None if it cannot be resolved to a single
project.
"""
if context is None:
context = {}
if type(context.get('default_project_id')) in (int, long):
return context.get('default_project_id')
if isinstance(context.get('default_project_id'), basestring):
project_name = context['default_project_id']
project_ids = self.pool.get('project.project').name_search(cr, uid, name=project_name, context=context)
if len(project_ids) == 1:
return int(project_ids[0][0])
return None
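    # For illustration (the project name is hypothetical): a context of
    # {'default_project_id': 42} resolves directly to 42, while
    # {'default_project_id': 'Website Redesign'} goes through name_search and
    # only yields an id when exactly one project matches; anything else
    # returns None.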
def _read_group_stage_ids(self, cr, uid, ids, domain, read_group_order=None, access_rights_uid=None, context=None):
access_rights_uid = access_rights_uid or uid
stage_obj = self.pool.get('project.task.type')
order = stage_obj._order
# lame hack to allow reverting search, should just work in the trivial case
if read_group_order == 'stage_id desc':
order = "%s desc" % order
# retrieve section_id from the context and write the domain
# - ('id', 'in', 'ids'): add columns that should be present
# - OR ('case_default', '=', True), ('fold', '=', False): add default columns that are not folded
# - OR ('project_ids', 'in', project_id), ('fold', '=', False) if project_id: add project columns that are not folded
search_domain = []
project_id = self._resolve_project_id_from_context(cr, uid, context=context)
if project_id:
search_domain += ['|', ('project_ids', '=', project_id)]
search_domain += [('id', 'in', ids)]
# perform search
stage_ids = stage_obj._search(cr, uid, search_domain, order=order, access_rights_uid=access_rights_uid, context=context)
result = stage_obj.name_get(cr, access_rights_uid, stage_ids, context=context)
# restore order of the search
result.sort(lambda x,y: cmp(stage_ids.index(x[0]), stage_ids.index(y[0])))
fold = {}
for stage in stage_obj.browse(cr, access_rights_uid, stage_ids, context=context):
fold[stage.id] = stage.fold or False
return result, fold
def _compute_day(self, cr, uid, ids, fields, args, context=None):
"""
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param ids: List of Openday’s IDs
@return: difference between current date and log date
@param context: A standard dictionary for contextual values
"""
cal_obj = self.pool.get('resource.calendar')
res_obj = self.pool.get('resource.resource')
res = {}
for issue in self.browse(cr, uid, ids, context=context):
res[issue.id] = {}
for field in fields:
duration = 0
ans = False
hours = 0
date_create = datetime.strptime(issue.create_date, "%Y-%m-%d %H:%M:%S")
if field in ['working_hours_open','day_open']:
if issue.date_open:
date_open = datetime.strptime(issue.date_open, "%Y-%m-%d %H:%M:%S")
ans = date_open - date_create
date_until = issue.date_open
#Calculating no. of working hours to open the issue
if issue.project_id.resource_calendar_id:
hours = cal_obj.interval_hours_get(cr, uid, issue.project_id.resource_calendar_id.id,
date_create,
date_open)
elif field in ['working_hours_close','day_close']:
if issue.date_closed:
date_close = datetime.strptime(issue.date_closed, "%Y-%m-%d %H:%M:%S")
date_until = issue.date_closed
ans = date_close - date_create
#Calculating no. of working hours to close the issue
if issue.project_id.resource_calendar_id:
hours = cal_obj.interval_hours_get(cr, uid, issue.project_id.resource_calendar_id.id,
date_create,
date_close)
elif field in ['days_since_creation']:
if issue.create_date:
days_since_creation = datetime.today() - datetime.strptime(issue.create_date, "%Y-%m-%d %H:%M:%S")
res[issue.id][field] = days_since_creation.days
continue
elif field in ['inactivity_days']:
res[issue.id][field] = 0
if issue.date_action_last:
inactive_days = datetime.today() - datetime.strptime(issue.date_action_last, '%Y-%m-%d %H:%M:%S')
res[issue.id][field] = inactive_days.days
continue
if ans:
resource_id = False
if issue.user_id:
resource_ids = res_obj.search(cr, uid, [('user_id','=',issue.user_id.id)])
if resource_ids and len(resource_ids):
resource_id = resource_ids[0]
duration = float(ans.days)
if issue.project_id and issue.project_id.resource_calendar_id:
duration = float(ans.days) * 24
new_dates = cal_obj.interval_min_get(cr, uid,
issue.project_id.resource_calendar_id.id,
date_create,
duration, resource=resource_id)
no_days = []
date_until = datetime.strptime(date_until, '%Y-%m-%d %H:%M:%S')
for in_time, out_time in new_dates:
if in_time.date not in no_days:
no_days.append(in_time.date)
if out_time > date_until:
break
duration = len(no_days)
if field in ['working_hours_open','working_hours_close']:
res[issue.id][field] = hours
else:
res[issue.id][field] = abs(float(duration))
return res
def _hours_get(self, cr, uid, ids, field_names, args, context=None):
task_pool = self.pool.get('project.task')
res = {}
for issue in self.browse(cr, uid, ids, context=context):
progress = 0.0
if issue.task_id:
progress = task_pool._hours_get(cr, uid, [issue.task_id.id], field_names, args, context=context)[issue.task_id.id]['progress']
res[issue.id] = {'progress' : progress}
return res
def on_change_project(self, cr, uid, ids, project_id, context=None):
return {}
def _get_issue_task(self, cr, uid, ids, context=None):
issues = []
issue_pool = self.pool.get('project.issue')
for task in self.pool.get('project.task').browse(cr, uid, ids, context=context):
issues += issue_pool.search(cr, uid, [('task_id','=',task.id)])
return issues
def _get_issue_work(self, cr, uid, ids, context=None):
issues = []
issue_pool = self.pool.get('project.issue')
for work in self.pool.get('project.task.work').browse(cr, uid, ids, context=context):
if work.task_id:
issues += issue_pool.search(cr, uid, [('task_id','=',work.task_id.id)])
return issues
_columns = {
'id': fields.integer('ID', readonly=True),
'name': fields.char('Issue', size=128, required=True),
'active': fields.boolean('Active', required=False),
'create_date': fields.datetime('Creation Date', readonly=True,select=True),
'write_date': fields.datetime('Update Date', readonly=True),
'days_since_creation': fields.function(_compute_day, string='Days since creation date', \
multi='compute_day', type="integer", help="Difference in days between creation date and current date"),
'date_deadline': fields.date('Deadline'),
'section_id': fields.many2one('crm.case.section', 'Sales Team', \
select=True, help='Sales team to which Case belongs to.\
Define Responsible user and Email account for mail gateway.'),
'partner_id': fields.many2one('res.partner', 'Contact', select=1),
'company_id': fields.many2one('res.company', 'Company'),
'description': fields.text('Private Note'),
'state': fields.related('stage_id', 'state', type="selection", store=True,
selection=_ISSUE_STATE, string="Status", readonly=True,
help='The status is set to \'Draft\', when a case is created.\
If the case is in progress the status is set to \'Open\'.\
When the case is over, the status is set to \'Done\'.\
If the case needs to be reviewed then the status is \
set to \'Pending\'.'),
'kanban_state': fields.selection([('normal', 'Normal'),('blocked', 'Blocked'),('done', 'Ready for next stage')], 'Kanban State',
track_visibility='onchange',
help="A Issue's kanban state indicates special situations affecting it:\n"
" * Normal is the default situation\n"
" * Blocked indicates something is preventing the progress of this issue\n"
" * Ready for next stage indicates the issue is ready to be pulled to the next stage",
readonly=True, required=False),
'email_from': fields.char('Email', size=128, help="These people will receive email.", select=1),
'email_cc': fields.char('Watchers Emails', size=256, help="These email addresses will be added to the CC field of all inbound and outbound emails for this record before being sent. Separate multiple email addresses with a comma"),
'date_open': fields.datetime('Opened', readonly=True,select=True),
# Project Issue fields
'date_closed': fields.datetime('Closed', readonly=True,select=True),
'date': fields.datetime('Date'),
'channel_id': fields.many2one('crm.case.channel', 'Channel', help="Communication channel."),
'categ_ids': fields.many2many('project.category', string='Tags'),
'priority': fields.selection(crm.AVAILABLE_PRIORITIES, 'Priority', select=True),
'version_id': fields.many2one('project.issue.version', 'Version'),
'stage_id': fields.many2one ('project.task.type', 'Stage',
track_visibility='onchange',
domain="['&', ('fold', '=', False), ('project_ids', '=', project_id)]"),
'project_id':fields.many2one('project.project', 'Project', track_visibility='onchange'),
'duration': fields.float('Duration'),
'task_id': fields.many2one('project.task', 'Task', domain="[('project_id','=',project_id)]"),
'day_open': fields.function(_compute_day, string='Days to Open', \
multi='compute_day', type="float", store=True),
'day_close': fields.function(_compute_day, string='Days to Close', \
multi='compute_day', type="float", store=True),
'user_id': fields.many2one('res.users', 'Assigned to', required=False, select=1, track_visibility='onchange'),
'working_hours_open': fields.function(_compute_day, string='Working Hours to Open the Issue', \
multi='compute_day', type="float", store=True),
'working_hours_close': fields.function(_compute_day, string='Working Hours to Close the Issue', \
multi='compute_day', type="float", store=True),
'inactivity_days': fields.function(_compute_day, string='Days since last action', \
multi='compute_day', type="integer", help="Difference in days between last action and current date"),
'color': fields.integer('Color Index'),
'user_email': fields.related('user_id', 'email', type='char', string='User Email', readonly=True),
'date_action_last': fields.datetime('Last Action', readonly=1),
'date_action_next': fields.datetime('Next Action', readonly=1),
'progress': fields.function(_hours_get, string='Progress (%)', multi='hours', group_operator="avg", help="Computed as: Time Spent / Total Time.",
store = {
'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['task_id'], 10),
'project.task': (_get_issue_task, ['progress'], 10),
'project.task.work': (_get_issue_work, ['hours'], 10),
}),
}
_defaults = {
'active': 1,
'partner_id': lambda s, cr, uid, c: s._get_default_partner(cr, uid, c),
'email_from': lambda s, cr, uid, c: s._get_default_email(cr, uid, c),
'stage_id': lambda s, cr, uid, c: s._get_default_stage_id(cr, uid, c),
'section_id': lambda s, cr, uid, c: s._get_default_section_id(cr, uid, c),
'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get(cr, uid, 'crm.helpdesk', context=c),
'priority': crm.AVAILABLE_PRIORITIES[2][0],
'kanban_state': 'normal',
}
_group_by_full = {
'stage_id': _read_group_stage_ids
}
def set_priority(self, cr, uid, ids, priority, *args):
"""Set lead priority
"""
return self.write(cr, uid, ids, {'priority' : priority})
def set_high_priority(self, cr, uid, ids, *args):
"""Set lead priority to high
"""
return self.set_priority(cr, uid, ids, '1')
def set_normal_priority(self, cr, uid, ids, *args):
"""Set lead priority to normal
"""
return self.set_priority(cr, uid, ids, '3')
def convert_issue_task(self, cr, uid, ids, context=None):
if context is None:
context = {}
case_obj = self.pool.get('project.issue')
data_obj = self.pool.get('ir.model.data')
task_obj = self.pool.get('project.task')
result = data_obj._get_id(cr, uid, 'project', 'view_task_search_form')
res = data_obj.read(cr, uid, result, ['res_id'])
id2 = data_obj._get_id(cr, uid, 'project', 'view_task_form2')
id3 = data_obj._get_id(cr, uid, 'project', 'view_task_tree2')
if id2:
id2 = data_obj.browse(cr, uid, id2, context=context).res_id
if id3:
id3 = data_obj.browse(cr, uid, id3, context=context).res_id
for bug in case_obj.browse(cr, uid, ids, context=context):
new_task_id = task_obj.create(cr, uid, {
'name': bug.name,
'partner_id': bug.partner_id.id,
'description':bug.description,
'date_deadline': bug.date,
'project_id': bug.project_id.id,
# priority must be in ['0','1','2','3','4'], while bug.priority is in ['1','2','3','4','5']
'priority': str(int(bug.priority) - 1),
'user_id': bug.user_id.id,
'planned_hours': 0.0,
})
vals = {
'task_id': new_task_id,
'stage_id': self.stage_find(cr, uid, [bug], bug.project_id.id, [('state', '=', 'pending')], context=context),
}
message = _("Project issue <b>converted</b> to task.")
self.message_post(cr, uid, [bug.id], body=message, context=context)
case_obj.write(cr, uid, [bug.id], vals, context=context)
return {
'name': _('Tasks'),
'view_type': 'form',
'view_mode': 'form,tree',
'res_model': 'project.task',
'res_id': int(new_task_id),
'view_id': False,
'views': [(id2,'form'),(id3,'tree'),(False,'calendar'),(False,'graph')],
'type': 'ir.actions.act_window',
'search_view_id': res['res_id'],
'nodestroy': True
}
def copy(self, cr, uid, id, default=None, context=None):
issue = self.read(cr, uid, id, ['name'], context=context)
if not default:
default = {}
default = default.copy()
default.update(name=_('%s (copy)') % (issue['name']))
return super(project_issue, self).copy(cr, uid, id, default=default,
context=context)
def write(self, cr, uid, ids, vals, context=None):
        # Update the last action date every time the user changes the stage or state, or sends a new email
logged_fields = ['stage_id', 'state', 'message_ids']
if any([field in vals for field in logged_fields]):
vals['date_action_last'] = time.strftime('%Y-%m-%d %H:%M:%S')
return super(project_issue, self).write(cr, uid, ids, vals, context)
def onchange_task_id(self, cr, uid, ids, task_id, context=None):
if not task_id:
return {'value': {}}
task = self.pool.get('project.task').browse(cr, uid, task_id, context=context)
return {'value': {'user_id': task.user_id.id, }}
def case_reset(self, cr, uid, ids, context=None):
"""Resets case as draft
"""
res = super(project_issue, self).case_reset(cr, uid, ids, context)
self.write(cr, uid, ids, {'date_open': False, 'date_closed': False})
return res
# -------------------------------------------------------
# Stage management
# -------------------------------------------------------
def set_kanban_state_blocked(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'kanban_state': 'blocked'}, context=context)
def set_kanban_state_normal(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'kanban_state': 'normal'}, context=context)
def set_kanban_state_done(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'kanban_state': 'done'}, context=context)
def stage_find(self, cr, uid, cases, section_id, domain=[], order='sequence', context=None):
""" Override of the base.stage method
            Parameters of the stage search are taken from the issue:
- type: stage type must be the same or 'both'
- section_id: if set, stages must belong to this section or
be a default case
"""
if isinstance(cases, (int, long)):
cases = self.browse(cr, uid, cases, context=context)
# collect all section_ids
section_ids = []
if section_id:
section_ids.append(section_id)
for task in cases:
if task.project_id:
section_ids.append(task.project_id.id)
# OR all section_ids and OR with case_default
search_domain = []
if section_ids:
search_domain += [('|')] * (len(section_ids)-1)
for section_id in section_ids:
search_domain.append(('project_ids', '=', section_id))
search_domain += list(domain)
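        # e.g. with section_ids [4, 7] and domain [('state', '=', 'pending')] the resulting
        # domain is ['|', ('project_ids', '=', 4), ('project_ids', '=', 7), ('state', '=', 'pending')]
        # (values are illustrative only)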
# perform search, return the first found
stage_ids = self.pool.get('project.task.type').search(cr, uid, search_domain, order=order, context=context)
if stage_ids:
return stage_ids[0]
return False
def case_cancel(self, cr, uid, ids, context=None):
""" Cancels case """
self.case_set(cr, uid, ids, 'cancelled', {'active': True}, context=context)
return True
def case_escalate(self, cr, uid, ids, context=None):
cases = self.browse(cr, uid, ids)
for case in cases:
data = {}
if case.project_id.project_escalation_id:
data['project_id'] = case.project_id.project_escalation_id.id
if case.project_id.project_escalation_id.user_id:
data['user_id'] = case.project_id.project_escalation_id.user_id.id
if case.task_id:
self.pool.get('project.task').write(cr, uid, [case.task_id.id], {'project_id': data['project_id'], 'user_id': False})
else:
raise osv.except_osv(_('Warning!'), _('You cannot escalate this issue.\nThe relevant Project has not configured the Escalation Project!'))
self.case_set(cr, uid, ids, 'draft', data, context=context)
return True
# -------------------------------------------------------
# Mail gateway
# -------------------------------------------------------
def message_new(self, cr, uid, msg, custom_values=None, context=None):
""" Overrides mail_thread message_new that is called by the mailgateway
through message_process.
This override updates the document according to the email.
"""
if custom_values is None: custom_values = {}
if context is None: context = {}
context['state_to'] = 'draft'
desc = html2plaintext(msg.get('body')) if msg.get('body') else ''
custom_values.update({
'name': msg.get('subject') or _("No Subject"),
'description': desc,
'email_from': msg.get('from'),
'email_cc': msg.get('cc'),
'user_id': False,
})
if msg.get('priority'):
custom_values['priority'] = msg.get('priority')
res_id = super(project_issue, self).message_new(cr, uid, msg, custom_values=custom_values, context=context)
return res_id
def message_update(self, cr, uid, ids, msg, update_vals=None, context=None):
""" Overrides mail_thread message_update that is called by the mailgateway
through message_process.
This method updates the document according to the email.
"""
if isinstance(ids, (str, int, long)):
ids = [ids]
if update_vals is None: update_vals = {}
# Update doc values according to the message
if msg.get('priority'):
update_vals['priority'] = msg.get('priority')
# Parse 'body' to find values to update
maps = {
'cost': 'planned_cost',
'revenue': 'planned_revenue',
'probability': 'probability',
}
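        # e.g. a command line in the mail body whose key is "cost" (as matched by
        # tools.command_re) sets update_vals['planned_cost'] to the matched value;
        # keys not listed in maps are ignored.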
for line in msg.get('body', '').split('\n'):
line = line.strip()
res = tools.command_re.match(line)
if res and maps.get(res.group(1).lower(), False):
key = maps.get(res.group(1).lower())
update_vals[key] = res.group(2).lower()
return super(project_issue, self).message_update(cr, uid, ids, msg, update_vals=update_vals, context=context)
class project(osv.osv):
_inherit = "project.project"
def _get_alias_models(self, cr, uid, context=None):
return [('project.task', "Tasks"), ("project.issue", "Issues")]
def _issue_count(self, cr, uid, ids, field_name, arg, context=None):
res = dict.fromkeys(ids, 0)
issue_ids = self.pool.get('project.issue').search(cr, uid, [('project_id', 'in', ids)])
for issue in self.pool.get('project.issue').browse(cr, uid, issue_ids, context):
res[issue.project_id.id] += 1
return res
_columns = {
'project_escalation_id' : fields.many2one('project.project','Project Escalation', help='If any issue is escalated from the current Project, it will be listed under the project selected here.', states={'close':[('readonly',True)], 'cancelled':[('readonly',True)]}),
'issue_count': fields.function(_issue_count, type='integer'),
}
def _check_escalation(self, cr, uid, ids, context=None):
project_obj = self.browse(cr, uid, ids[0], context=context)
if project_obj.project_escalation_id:
if project_obj.project_escalation_id.id == project_obj.id:
return False
return True
_constraints = [
(_check_escalation, 'Error! You cannot assign escalation to the same project!', ['project_escalation_id'])
]
project()
class account_analytic_account(osv.osv):
_inherit = 'account.analytic.account'
_description = 'Analytic Account'
_columns = {
'use_issues' : fields.boolean('Issues', help="Check this field if this project manages issues"),
}
def on_change_template(self, cr, uid, ids, template_id, context=None):
res = super(account_analytic_account, self).on_change_template(cr, uid, ids, template_id, context=context)
if template_id and 'value' in res:
template = self.browse(cr, uid, template_id, context=context)
res['value']['use_issues'] = template.use_issues
return res
def _trigger_project_creation(self, cr, uid, vals, context=None):
if context is None: context = {}
res = super(account_analytic_account, self)._trigger_project_creation(cr, uid, vals, context=context)
return res or (vals.get('use_issues') and not 'project_creation_in_progress' in context)
account_analytic_account()
class project_project(osv.osv):
_inherit = 'project.project'
_defaults = {
'use_issues': True
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
tellapart/Diamond | src/collectors/network/test/testnetwork.py | 8 | 11711 | #!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
StringIO # workaround for pyflakes issue #13
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from network import NetworkCollector
################################################################################
class TestNetworkCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('NetworkCollector', {
'interfaces': ['eth', 'em', 'bond', 'veth', 'br-lxc'],
'interval': 10,
'byte_unit': ['bit', 'megabit', 'megabyte'],
})
self.collector = NetworkCollector(config, None)
def test_import(self):
self.assertTrue(NetworkCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_net_dev(self, publish_mock, open_mock):
open_mock.return_value = StringIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/net/dev')
@patch.object(Collector, 'publish')
def test_should_work_with_virtual_interfaces_and_bridges(self,
publish_mock):
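        # Collect twice: the collector reports per-interval deltas, so the first
        # collect() only primes the counters from the first fixture and the second
        # collect() produces the values asserted below.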
NetworkCollector.PROC = self.getFixturePath('proc_net_dev_1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
NetworkCollector.PROC = self.getFixturePath('proc_net_dev_2')
self.collector.collect()
metrics = {
'eth0.rx_megabyte': (2.504, 2),
'eth0.tx_megabyte': (4.707, 2),
'eth1.rx_megabyte': (0.0, 2),
'eth1.tx_megabyte': (0.0, 2),
'em2.rx_megabyte': (2.504, 2),
'em2.tx_megabyte': (4.707, 2),
'bond3.rx_megabyte': (2.504, 2),
'bond3.tx_megabyte': (4.707, 2),
'vethmR3i5e.tx_megabyte': (0.223, 2),
'vethmR3i5e.rx_megabyte': (0.033, 2),
'br-lxc-247.tx_megabyte': (0.307, 2),
'br-lxc-247.rx_megabyte': (0.032, 2)
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
NetworkCollector.PROC = self.getFixturePath('proc_net_dev_1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
NetworkCollector.PROC = self.getFixturePath('proc_net_dev_2')
self.collector.collect()
metrics = {
'eth0.rx_megabyte': (2.504, 2),
'eth0.tx_megabyte': (4.707, 2),
'eth1.rx_megabyte': (0.0, 2),
'eth1.tx_megabyte': (0.0, 2),
'em2.rx_megabyte': (2.504, 2),
'em2.tx_megabyte': (4.707, 2),
'bond3.rx_megabyte': (2.504, 2),
'bond3.tx_megabyte': (4.707, 2)
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
# Named test_z_* to run after test_should_open_proc_net_dev
@patch.object(Collector, 'publish')
def test_z_issue_208_a(self, publish_mock):
NetworkCollector.PROC = self.getFixturePath('208-a_1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
NetworkCollector.PROC = self.getFixturePath('208-a_2')
self.collector.collect()
metrics = {
'bond0.rx_bit': 2687979419428.0,
'bond0.rx_compressed': 0.0,
'bond0.rx_drop': 0.0,
'bond0.rx_errors': 0.0,
'bond0.rx_fifo': 0.0,
'bond0.rx_frame': 0.0,
'bond0.rx_multicast': 8481087.9,
'bond0.rx_packets': 264585067.9,
'bond0.tx_bit': 1569889402921.6,
'bond0.tx_compressed': 0.0,
'bond0.tx_drop': 0.0,
'bond0.tx_errors': 0.0,
'bond0.tx_fifo': 0.0,
'bond0.tx_frame': 0.0,
'bond0.tx_multicast': 0.0,
'bond0.tx_packets': 200109891.6,
'bond1.rx_bit': 16933606875970.4,
'bond1.rx_compressed': 0.0,
'bond1.rx_drop': 0.0,
'bond1.rx_errors': 0.0,
'bond1.rx_fifo': 0.0,
'bond1.rx_frame': 0.0,
'bond1.rx_multicast': 7.8,
'bond1.rx_packets': 2419703159.9,
'bond1.tx_bit': 17842573410005.6,
'bond1.tx_compressed': 0.0,
'bond1.tx_drop': 0.0,
'bond1.tx_errors': 0.0,
'bond1.tx_fifo': 0.0,
'bond1.tx_frame': 0.0,
'bond1.tx_multicast': 0.0,
'bond1.tx_packets': 2654259261.0,
'em1.rx_bit': 2687881969344.8,
'em1.rx_compressed': 0.0,
'em1.rx_drop': 0.0,
'em1.rx_errors': 0.0,
'em1.rx_fifo': 0.0,
'em1.rx_frame': 0.0,
'em1.rx_multicast': 8471878.8,
'em1.rx_packets': 264382058.1,
'em1.tx_bit': 1569889402921.6,
'em1.tx_compressed': 0.0,
'em1.tx_drop': 0.0,
'em1.tx_errors': 0.0,
'em1.tx_fifo': 0.0,
'em1.tx_frame': 0.0,
'em1.tx_multicast': 0.0,
'em1.tx_packets': 200109891.6,
'em2.rx_bit': 97450083.2,
'em2.rx_compressed': 0.0,
'em2.rx_drop': 0.0,
'em2.rx_errors': 0.0,
'em2.rx_fifo': 0.0,
'em2.rx_frame': 0.0,
'em2.rx_multicast': 9209.1,
'em2.rx_packets': 203009.8,
'em2.tx_bit': 0,
'em2.tx_compressed': 0.0,
'em2.tx_drop': 0.0,
'em2.tx_errors': 0.0,
'em2.tx_fifo': 0.0,
'em2.tx_frame': 0.0,
'em2.tx_multicast': 0.0,
'em2.tx_packets': 0.0,
'em3.rx_bit': 514398.4,
'em3.rx_compressed': 0.0,
'em3.rx_drop': 0.0,
'em3.rx_errors': 0.0,
'em3.rx_fifo': 0.0,
'em3.rx_frame': 0.0,
'em3.rx_multicast': 0.0,
'em3.rx_packets': 1071.6,
'em3.tx_bit': 0.0,
'em3.tx_compressed': 0.0,
'em3.tx_drop': 0.0,
'em3.tx_errors': 0.0,
'em3.tx_fifo': 0.0,
'em3.tx_frame': 0.0,
'em3.tx_multicast': 0.0,
'em3.tx_packets': 0.0,
'em4.rx_bit': 16933606361572.0,
'em4.rx_compressed': 0.0,
'em4.rx_drop': 0.0,
'em4.rx_errors': 0.0,
'em4.rx_fifo': 0.0,
'em4.rx_frame': 0.0,
'em4.rx_multicast': 7.8,
'em4.rx_packets': 2419702088.3,
'em4.tx_bit': 17842573410005.6,
'em4.tx_compressed': 0.0,
'em4.tx_drop': 0.0,
'em4.tx_errors': 0.0,
'em4.tx_fifo': 0.0,
'em4.tx_frame': 0.0,
'em4.tx_multicast': 0.0,
'em4.tx_packets': 2654259261.0,
}
self.assertPublishedMany(publish_mock, metrics)
# Named test_z_* to run after test_should_open_proc_net_dev
@patch.object(Collector, 'publish')
def test_z_issue_208_b(self, publish_mock):
NetworkCollector.PROC = self.getFixturePath('208-b_1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
NetworkCollector.PROC = self.getFixturePath('208-b_2')
self.collector.collect()
metrics = {
'bond0.rx_bit': 12754357408.8,
'bond0.rx_compressed': 0.0,
'bond0.rx_drop': 0.0,
'bond0.rx_errors': 0.0,
'bond0.rx_fifo': 0.0,
'bond0.rx_frame': 0.0,
'bond0.rx_multicast': 8483853.6,
'bond0.rx_packets': 13753449.5,
'bond0.tx_bit': 51593345279.2,
'bond0.tx_compressed': 0.0,
'bond0.tx_drop': 0.0,
'bond0.tx_errors': 0.0,
'bond0.tx_fifo': 0.0,
'bond0.tx_frame': 0.0,
'bond0.tx_multicast': 0.0,
'bond0.tx_packets': 58635426.6,
'bond1.rx_bit': 48298217736175.2,
'bond1.rx_compressed': 0.0,
'bond1.rx_drop': 0.0,
'bond1.rx_errors': 0.0,
'bond1.rx_fifo': 473.8,
'bond1.rx_frame': 0.0,
'bond1.rx_multicast': 2.9,
'bond1.rx_packets': 4869871086.2,
'bond1.tx_bit': 23149038213964.0,
'bond1.tx_compressed': 0.0,
'bond1.tx_drop': 0.0,
'bond1.tx_errors': 0.0,
'bond1.tx_fifo': 0.0,
'bond1.tx_frame': 0.0,
'bond1.tx_multicast': 0.0,
'bond1.tx_packets': 2971941537.3,
'em1.rx_bit': 12657057999.2,
'em1.rx_compressed': 0.0,
'em1.rx_drop': 0.0,
'em1.rx_errors': 0.0,
'em1.rx_fifo': 0.0,
'em1.rx_frame': 0.0,
'em1.rx_multicast': 8474644.4,
'em1.rx_packets': 13550781.5,
'em1.tx_bit': 51593345279.2,
'em1.tx_compressed': 0.0,
'em1.tx_drop': 0.0,
'em1.tx_errors': 0.0,
'em1.tx_fifo': 0.0,
'em1.tx_frame': 0.0,
'em1.tx_multicast': 0.0,
'em1.tx_packets': 58635426.6,
'em2.rx_bit': 97299409.6,
'em2.rx_compressed': 0.0,
'em2.rx_drop': 0.0,
'em2.rx_errors': 0.0,
'em2.rx_fifo': 0.0,
'em2.rx_frame': 0.0,
'em2.rx_multicast': 9209.2,
'em2.rx_packets': 202668.0,
'em2.tx_bit': 0,
'em2.tx_compressed': 0.0,
'em2.tx_drop': 0.0,
'em2.tx_errors': 0.0,
'em2.tx_fifo': 0.0,
'em2.tx_frame': 0.0,
'em2.tx_multicast': 0.0,
'em2.tx_packets': 0.0,
'em3.rx_bit': 48298184648012.0,
'em3.rx_compressed': 0.0,
'em3.rx_drop': 0.0,
'em3.rx_errors': 0.0,
'em3.rx_fifo': 473.8,
'em3.rx_frame': 0.0,
'em3.rx_multicast': 2.9,
'em3.rx_packets': 4869866440.5,
'em3.tx_bit': 23149038213964.0,
'em3.tx_compressed': 0.0,
'em3.tx_drop': 0.0,
'em3.tx_errors': 0.0,
'em3.tx_fifo': 0.0,
'em3.tx_frame': 0.0,
'em3.tx_multicast': 0.0,
'em3.tx_packets': 2971941537.3,
'em4.rx_bit': 33088163.2,
'em4.rx_compressed': 0.0,
'em4.rx_drop': 0.0,
'em4.rx_errors': 0.0,
'em4.rx_fifo': 0.0,
'em4.rx_frame': 0.0,
'em4.rx_multicast': 0.0,
'em4.rx_packets': 4645.7,
'em4.tx_bit': 0,
'em4.tx_compressed': 0.0,
'em4.tx_drop': 0.0,
'em4.tx_errors': 0.0,
'em4.tx_fifo': 0.0,
'em4.tx_frame': 0.0,
'em4.tx_multicast': 0.0,
'em4.tx_packets': 0.0,
}
self.assertPublishedMany(publish_mock, metrics)
################################################################################
if __name__ == "__main__":
unittest.main()
| mit |
beckastar/django | django/conf/locale/ko/formats.py | 82 | 2324 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'Y년 n월 j일'
TIME_FORMAT = 'A g:i:s'
DATETIME_FORMAT = 'Y년 n월 j일 g:i:s A'
YEAR_MONTH_FORMAT = 'Y년 F월'
MONTH_DAY_FORMAT = 'F월 j일'
SHORT_DATE_FORMAT = 'Y-n-j.'
SHORT_DATETIME_FORMAT = 'Y-n-j H:i'
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = (
'%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
# '%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
# '%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
# '%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
# '%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
'%Y년 %m월 %d일', # '2006년 10월 25일', with localized suffix.
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M:%S.%f', # '14:30:59.000200'
'%H:%M', # '14:30'
'%H시 %M분 %S초', # '14시 30분 59초'
'%H시 %M분', # '14시 30분'
)
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
'%m/%d/%Y %H:%M:%S.%f', # '10/25/2006 14:30:59.000200'
'%m/%d/%Y %H:%M', # '10/25/2006 14:30'
'%m/%d/%Y', # '10/25/2006'
'%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
'%m/%d/%y %H:%M:%S.%f', # '10/25/06 14:30:59.000200'
'%m/%d/%y %H:%M', # '10/25/06 14:30'
'%m/%d/%y', # '10/25/06'
'%Y년 %m월 %d일 %H시 %M분 %S초', # '2006년 10월 25일 14시 30분 59초'
'%Y년 %m월 %d일 %H시 %M분', # '2006년 10월 25일 14시 30분'
)
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
NUMBER_GROUPING = 3
| bsd-3-clause |
campbe13/openhatch | vendor/packages/sphinx/tests/etree13/HTMLTreeBuilder.py | 25 | 7771 | #
# ElementTree
# $Id$
#
# a simple tree builder, for HTML input
#
# history:
# 2002-04-06 fl created
# 2002-04-07 fl ignore IMG and HR end tags
# 2002-04-07 fl added support for 1.5.2 and later
# 2003-04-13 fl added HTMLTreeBuilder alias
# 2004-12-02 fl don't feed non-ASCII charrefs/entities as 8-bit strings
# 2004-12-05 fl don't feed non-ASCII CDATA as 8-bit strings
#
# Copyright (c) 1999-2004 by Fredrik Lundh. All rights reserved.
#
# [email protected]
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2007 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
##
# Tools to build element trees from HTML files.
##
import htmlentitydefs
import re, string, sys
import mimetools, StringIO
import ElementTree
AUTOCLOSE = "p", "li", "tr", "th", "td", "head", "body"
IGNOREEND = "img", "hr", "meta", "link", "br"
if sys.version[:3] == "1.5":
is_not_ascii = re.compile(r"[\x80-\xff]").search # 1.5.2
else:
is_not_ascii = re.compile(eval(r'u"[\u0080-\uffff]"')).search
try:
from HTMLParser import HTMLParser
except ImportError:
from sgmllib import SGMLParser
# hack to use sgmllib's SGMLParser to emulate 2.2's HTMLParser
class HTMLParser(SGMLParser):
# the following only works as long as this class doesn't
# provide any do, start, or end handlers
def unknown_starttag(self, tag, attrs):
self.handle_starttag(tag, attrs)
def unknown_endtag(self, tag):
self.handle_endtag(tag)
##
# ElementTree builder for HTML source code. This builder converts an
# HTML document or fragment to an ElementTree.
# <p>
# The parser is relatively picky, and requires balanced tags for most
# elements.  However, elements belonging to the following group are
# automatically closed: P, LI, TR, TH, TD, HEAD, and BODY.  In addition, the
# parser automatically inserts end tags immediately after the start
# tag, and ignores any end tags for the following group: IMG, HR,
# META, LINK, and BR.
#
# @keyparam builder Optional builder object. If omitted, the parser
# uses the standard <b>elementtree</b> builder.
# @keyparam encoding Optional character encoding, if known. If omitted,
# the parser looks for META tags inside the document. If no tags
# are found, the parser defaults to ISO-8859-1. Note that if your
# document uses a non-ASCII compatible encoding, you must decode
# the document before parsing.
#
# @see elementtree.ElementTree
class HTMLTreeBuilder(HTMLParser):
# FIXME: shouldn't this class be named Parser, not Builder?
def __init__(self, builder=None, encoding=None):
self.__stack = []
if builder is None:
builder = ElementTree.TreeBuilder()
self.__builder = builder
self.encoding = encoding or "iso-8859-1"
HTMLParser.__init__(self)
##
# Flushes parser buffers, and return the root element.
#
# @return An Element instance.
def close(self):
HTMLParser.close(self)
return self.__builder.close()
##
# (Internal) Handles start tags.
def handle_starttag(self, tag, attrs):
if tag == "meta":
# look for encoding directives
http_equiv = content = None
for k, v in attrs:
if k == "http-equiv":
http_equiv = string.lower(v)
elif k == "content":
content = v
if http_equiv == "content-type" and content:
# use mimetools to parse the http header
header = mimetools.Message(
StringIO.StringIO("%s: %s\n\n" % (http_equiv, content))
)
encoding = header.getparam("charset")
if encoding:
self.encoding = encoding
if tag in AUTOCLOSE:
if self.__stack and self.__stack[-1] == tag:
self.handle_endtag(tag)
self.__stack.append(tag)
attrib = {}
if attrs:
for k, v in attrs:
attrib[string.lower(k)] = v
self.__builder.start(tag, attrib)
if tag in IGNOREEND:
self.__stack.pop()
self.__builder.end(tag)
##
# (Internal) Handles end tags.
def handle_endtag(self, tag):
if tag in IGNOREEND:
return
lasttag = self.__stack.pop()
if tag != lasttag and lasttag in AUTOCLOSE:
self.handle_endtag(lasttag)
self.__builder.end(tag)
##
# (Internal) Handles character references.
def handle_charref(self, char):
if char[:1] == "x":
char = int(char[1:], 16)
else:
char = int(char)
if 0 <= char < 128:
self.__builder.data(chr(char))
else:
self.__builder.data(unichr(char))
##
# (Internal) Handles entity references.
def handle_entityref(self, name):
entity = htmlentitydefs.entitydefs.get(name)
if entity:
if len(entity) == 1:
entity = ord(entity)
else:
entity = int(entity[2:-1])
if 0 <= entity < 128:
self.__builder.data(chr(entity))
else:
self.__builder.data(unichr(entity))
else:
self.unknown_entityref(name)
##
# (Internal) Handles character data.
def handle_data(self, data):
if isinstance(data, type('')) and is_not_ascii(data):
# convert to unicode, but only if necessary
data = unicode(data, self.encoding, "ignore")
self.__builder.data(data)
##
# (Hook) Handles unknown entity references. The default action
# is to ignore unknown entities.
def unknown_entityref(self, name):
pass # ignore by default; override if necessary
##
# An alias for the <b>HTMLTreeBuilder</b> class.
TreeBuilder = HTMLTreeBuilder
##
# Parse an HTML document or document fragment.
#
# @param source A filename or file object containing HTML data.
# @param encoding Optional character encoding, if known. If omitted,
# the parser looks for META tags inside the document. If no tags
# are found, the parser defaults to ISO-8859-1.
# @return An ElementTree instance
def parse(source, encoding=None):
return ElementTree.parse(source, HTMLTreeBuilder(encoding=encoding))
if __name__ == "__main__":
import sys
ElementTree.dump(parse(open(sys.argv[1])))
| agpl-3.0 |
nrjcoin-project/p2pool | p2pool/test/bitcoin/test_getwork.py | 275 | 4273 | import unittest
from p2pool.bitcoin import getwork, data as bitcoin_data
class Test(unittest.TestCase):
def test_all(self):
cases = [
{
'target': '0000000000000000000000000000000000000000000000f2b944000000000000',
'midstate': '5982f893102dec03e374b472647c4f19b1b6d21ae4b2ac624f3d2f41b9719404',
'hash1': '00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000',
'data': '0000000163930d52a5ffca79b29b95a659a302cd4e1654194780499000002274000000002e133d9e51f45bc0886d05252038e421e82bff18b67dc14b90d9c3c2f422cd5c4dd4598e1a44b9f200000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000'
},
{
'midstate' : 'f4a9b048c0cb9791bc94b13ee0eec21e713963d524fd140b58bb754dd7b0955f',
'data' : '000000019a1d7342fb62090bda686b22d90f9f73d0f5c418b9c980cd0000011a00000000680b07c8a2f97ecd831f951806857e09f98a3b81cdef1fa71982934fef8dc3444e18585d1a0abbcf00000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000',
'hash1' : '00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000',
'target' : '0000000000000000000000000000000000000000000000cfbb0a000000000000',
'extrathing': 'hi!',
},
{
'data' : '000000019a1d7342fb62090bda686b22d90f9f73d0f5c418b9c980cd0000011a00000000680b07c8a2f97ecd831f951806857e09f98a3b81cdef1fa71982934fef8dc3444e18585d1a0abbcf00000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000',
'hash1' : '00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000',
'target' : '0000000000000000000000000000000000000000000000cfbb0a000000000000',
'extrathing': 'hi!',
},
]
for case in cases:
ba = getwork.BlockAttempt.from_getwork(case)
extra = dict(case)
del extra['data'], extra['hash1'], extra['target']
extra.pop('midstate', None)
getwork_check = ba.getwork(**extra)
assert getwork_check == case or dict((k, v) for k, v in getwork_check.iteritems() if k != 'midstate') == case
case2s = [
getwork.BlockAttempt(
1,
0x148135e10208db85abb62754341a392eab1f186aab077a831cf7,
0x534ea08be1ab529f484369344b6d5423ef5a0767db9b3ebb4e182bbb67962520,
1305759879,
bitcoin_data.FloatingInteger.from_target_upper_bound(0x44b9f20000000000000000000000000000000000000000000000),
0x44b9f20000000000000000000000000000000000000000000000,
),
getwork.BlockAttempt(
1,
0x148135e10208db85abb62754341a392eab1f186aab077a831cf7,
0x534ea08be1ab529f484369344b6d5423ef5a0767db9b3ebb4e182bbb67962520,
1305759879,
bitcoin_data.FloatingInteger.from_target_upper_bound(0x44b9f20000000000000000000000000000000000000000000000),
432*2**230,
),
getwork.BlockAttempt(
1,
0x148135e10208db85abb62754341a392eab1f186aab077a831cf7,
0x534ea08be1ab529f484369344b6d5423ef5a0767db9b3ebb4e182bbb67962520,
1305759879,
bitcoin_data.FloatingInteger.from_target_upper_bound(0x44b9f20000000000000000000000000000000000000000000000),
7*2**240,
)
]
for case2 in case2s:
assert getwork.BlockAttempt.from_getwork(case2.getwork()) == case2
assert getwork.BlockAttempt.from_getwork(case2.getwork(ident='hi')) == case2
case2 = case2.update(previous_block=case2.previous_block - 10)
assert getwork.BlockAttempt.from_getwork(case2.getwork()) == case2
assert getwork.BlockAttempt.from_getwork(case2.getwork(ident='hi')) == case2
| gpl-3.0 |
cg31/tensorflow | tensorflow/contrib/layers/__init__.py | 9 | 2629 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for building neural network layers, regularizers, summaries, etc.
## Higher level ops for building neural network layers.
This package provides several ops that take care of creating variables that are
used internally in a consistent way and provide the building blocks for many
common machine learning algorithms.
@@avg_pool2d
@@batch_norm
@@convolution2d
@@convolution2d_in_plane
@@convolution2d_transpose
@@flatten
@@fully_connected
@@layer_norm
@@max_pool2d
@@one_hot_encoding
@@repeat
@@safe_embedding_lookup_sparse
@@separable_convolution2d
@@stack
@@unit_norm
Aliases for fully_connected which set a default activation function are
available: `relu`, `relu6` and `linear`.
## Regularizers
Regularization can help prevent overfitting. These have the signature
`fn(weights)`. The loss is typically added to
`tf.GraphKeys.REGULARIZATION_LOSSES`.
@@apply_regularization
@@l1_regularizer
@@l2_regularizer
@@sum_regularizer
## Initializers
Initializers are used to initialize variables with sensible values given their
size, data type, and purpose.
@@xavier_initializer
@@xavier_initializer_conv2d
@@variance_scaling_initializer
## Optimization
Optimize weights given a loss.
@@optimize_loss
## Summaries
Helper functions to summarize specific variables or ops.
@@summarize_activation
@@summarize_tensor
@@summarize_tensors
@@summarize_collection
The layers module defines convenience functions `summarize_variables`,
`summarize_weights` and `summarize_biases`, which set the `collection` argument
of `summarize_collection` to `VARIABLES`, `WEIGHTS` and `BIASES`, respectively.
@@summarize_activations
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
# pylint: disable=unused-import,wildcard-import
from tensorflow.contrib.layers.python.layers import *
from tensorflow.python.util.all_util import make_all
__all__ = make_all(__name__)
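# Illustrative usage sketch for the ops documented above; the input shape,
# layer sizes and regularizer scale are assumptions for this example only:
#
#   import tensorflow as tf
#   from tensorflow.contrib import layers
#
#   x = tf.placeholder(tf.float32, [None, 128])
#   h = layers.fully_connected(x, 64,
#                              weights_initializer=layers.xavier_initializer(),
#                              weights_regularizer=layers.l2_regularizer(1e-4))
#   logits = layers.fully_connected(h, 10, activation_fn=None)
#   # regularization terms are collected in tf.GraphKeys.REGULARIZATION_LOSSES
#   reg_loss = tf.add_n(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))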
| apache-2.0 |
jounex/hue | desktop/core/ext-py/Django-1.6.10/django/contrib/sitemaps/tests/urls/http.py | 106 | 1647 | from datetime import datetime
from django.conf.urls import patterns, url
from django.contrib.sitemaps import Sitemap, GenericSitemap, FlatPageSitemap, views
from django.views.decorators.cache import cache_page
from django.contrib.sitemaps.tests.base import TestModel
class SimpleSitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = '/location/'
lastmod = datetime.now()
def items(self):
return [object()]
simple_sitemaps = {
'simple': SimpleSitemap,
}
generic_sitemaps = {
'generic': GenericSitemap({'queryset': TestModel.objects.all()}),
}
flatpage_sitemaps = {
'flatpages': FlatPageSitemap,
}
urlpatterns = patterns('django.contrib.sitemaps.views',
(r'^simple/index\.xml$', 'index', {'sitemaps': simple_sitemaps}),
(r'^simple/custom-index\.xml$', 'index',
{'sitemaps': simple_sitemaps, 'template_name': 'custom_sitemap_index.xml'}),
(r'^simple/sitemap-(?P<section>.+)\.xml$', 'sitemap',
{'sitemaps': simple_sitemaps}),
(r'^simple/sitemap\.xml$', 'sitemap', {'sitemaps': simple_sitemaps}),
(r'^simple/custom-sitemap\.xml$', 'sitemap',
{'sitemaps': simple_sitemaps, 'template_name': 'custom_sitemap.xml'}),
(r'^generic/sitemap\.xml$', 'sitemap', {'sitemaps': generic_sitemaps}),
(r'^flatpages/sitemap\.xml$', 'sitemap', {'sitemaps': flatpage_sitemaps}),
url(r'^cached/index\.xml$', cache_page(1)(views.index),
{'sitemaps': simple_sitemaps, 'sitemap_url_name': 'cached_sitemap'}),
url(r'^cached/sitemap-(?P<section>.+)\.xml', cache_page(1)(views.sitemap),
{'sitemaps': simple_sitemaps}, name='cached_sitemap')
)
| apache-2.0 |
buckle2000/godot | tools/export/blender25/godot_export_manager.py | 13 | 20513 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Script copyright (c) Andreas Esau
bl_info = {
    "name": "Godot Export Manager",
    "author": "Andreas Esau",
    "version": (1, 0),
    "blender": (2, 7, 0),
    "location": "Scene Properties > Godot Export Manager",
    "description": "Godot Export Manager uses the Better Collada Exporter to manage Export Groups and automatically export the object groups to Collada Files.",
"warning": "",
"wiki_url": ("http://www.godotengine.org"),
"tracker_url": "",
"category": "Import-Export"}
import bpy
from bpy.props import StringProperty, BoolProperty, EnumProperty, FloatProperty, FloatVectorProperty, IntProperty, CollectionProperty, PointerProperty
import os
from bpy.app.handlers import persistent
from mathutils import Vector, Matrix
class godot_export_manager(bpy.types.Panel):
bl_label = "Godot Export Manager"
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = "scene"
bpy.types.Scene.godot_export_on_save = BoolProperty(default=False)
### draw function for all ui elements
def draw(self, context):
layout = self.layout
split = self.layout.split()
scene = bpy.data.scenes[0]
ob = context.object
scene = context.scene
row = layout.row()
col = row.column()
col.prop(scene,"godot_export_on_save",text="Export Groups on save")
row = layout.row()
col = row.column(align=True)
op = col.operator("scene.godot_add_objects_to_group",text="Add selected objects to Group",icon="COPYDOWN")
op = col.operator("scene.godot_delete_objects_from_group",text="Delete selected objects from Group",icon="PASTEDOWN")
row = layout.row()
col = row.column()
col.label(text="Export Groups:")
row = layout.row()
col = row.column()
col.template_list("UI_List_Godot","dummy",scene, "godot_export_groups", scene, "godot_export_groups_index",rows=1,maxrows=10,type='DEFAULT')
col = row.column(align=True)
col.operator("scene.godot_add_export_group",text="",icon="ZOOMIN")
col.operator("scene.godot_delete_export_group",text="",icon="ZOOMOUT")
col.operator("scene.godot_export_all_groups",text="",icon="EXPORT")
if len(scene.godot_export_groups) > 0:
row = layout.row()
col = row.column()
group = scene.godot_export_groups[scene.godot_export_groups_index]
col.prop(group,"name",text="Group Name")
col.prop(group,"export_name",text="Export Name")
col.prop(group,"export_path",text="Export Filepath")
row = layout.row()
col = row.column()
row = layout.row()
col = row.column()
col.label(text="Export Settings:")
col = col.row(align=True)
col.prop(group,"apply_loc",toggle=True,icon="MAN_TRANS")
col.prop(group,"apply_rot",toggle=True,icon="MAN_ROT")
col.prop(group,"apply_scale",toggle=True,icon="MAN_SCALE")
row = layout.row()
col = row.column()
col.prop(group,"use_include_particle_duplicates")
col.prop(group,"use_mesh_modifiers")
col.prop(group,"use_tangent_arrays")
col.prop(group,"use_triangles")
col.prop(group,"use_copy_images")
col.prop(group,"use_active_layers")
col.prop(group,"use_anim")
col.prop(group,"use_anim_action_all")
col.prop(group,"use_anim_skip_noexp")
col.prop(group,"use_anim_optimize")
col.prop(group,"anim_optimize_precision")
col.prop(group,"use_metadata")
### Custom template_list look
class UI_List_Godot(bpy.types.UIList):
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
ob = data
slot = item
col = layout.row(align=True)
col.label(text=item.name,icon="GROUP")
col.prop(item,"active",text="")
op = col.operator("scene.godot_select_group_objects",text="",emboss=False,icon="RESTRICT_SELECT_OFF")
op.idx = index
op = col.operator("scene.godot_export_group",text="",emboss=False,icon="EXPORT")
op.idx = index
class add_objects_to_group(bpy.types.Operator):
bl_idname = "scene.godot_add_objects_to_group"
bl_label = "Add Objects to Group"
bl_description = "Adds the selected Objects to the active group below."
undo = BoolProperty(default=True)
def execute(self,context):
scene = context.scene
objects_str = ""
if len(scene.godot_export_groups) > 0:
for i,object in enumerate(context.selected_objects):
if object.name not in scene.godot_export_groups[scene.godot_export_groups_index].nodes:
node = scene.godot_export_groups[scene.godot_export_groups_index].nodes.add()
node.name = object.name
if i == 0:
objects_str += object.name
else:
objects_str += ", "+object.name
self.report({'INFO'}, objects_str + " added to group." )
if self.undo:
bpy.ops.ed.undo_push(message="Objects added to group")
else:
self.report({'WARNING'}, "Create a group first." )
return{'FINISHED'}
class del_objects_from_group(bpy.types.Operator):
bl_idname = "scene.godot_delete_objects_from_group"
bl_label = "Delete Objects from Group"
    bl_description = "Deletes the selected Objects from the active group below."
def execute(self,context):
scene = context.scene
if len(scene.godot_export_groups) > 0:
selected_objects = []
for object in context.selected_objects:
selected_objects.append(object.name)
objects_str = ""
j = 0
for i,node in enumerate(scene.godot_export_groups[scene.godot_export_groups_index].nodes):
if node.name in selected_objects:
scene.godot_export_groups[scene.godot_export_groups_index].nodes.remove(i)
if j == 0:
                        objects_str += node.name
else:
                        objects_str += ", "+node.name
j+=1
self.report({'INFO'}, objects_str + " deleted from group." )
bpy.ops.ed.undo_push(message="Objects deleted from group")
else:
self.report({'WARNING'}, "There is no group to delete from." )
return{'FINISHED'}
class select_group_objects(bpy.types.Operator):
bl_idname = "scene.godot_select_group_objects"
bl_label = "Select Group Objects"
bl_description = "Will select all group Objects in the scene."
idx = IntProperty()
def execute(self,context):
scene = context.scene
for object in context.scene.objects:
object.select = False
for node in scene.godot_export_groups[self.idx].nodes:
if node.name in bpy.data.objects:
bpy.data.objects[node.name].select = True
context.scene.objects.active = bpy.data.objects[node.name]
return{'FINISHED'}
class export_groups_autosave(bpy.types.Operator):
bl_idname = "scene.godot_export_groups_autosave"
bl_label = "Export All Groups"
bl_description = "Exports all groups to Collada."
def execute(self,context):
scene = context.scene
if scene.godot_export_on_save:
for i in range(len(scene.godot_export_groups)):
if scene.godot_export_groups[i].active:
bpy.ops.scene.godot_export_group(idx=i)
self.report({'INFO'}, "All Groups exported." )
bpy.ops.ed.undo_push(message="Export all Groups")
return{'FINISHED'}
class export_all_groups(bpy.types.Operator):
bl_idname = "scene.godot_export_all_groups"
bl_label = "Export All Groups"
bl_description = "Exports all groups to Collada."
def execute(self,context):
scene = context.scene
for i in range(0,len(scene.godot_export_groups)):
bpy.ops.scene.godot_export_group(idx=i,export_all=True)
self.report({'INFO'}, "All Groups exported." )
return{'FINISHED'}
class export_group(bpy.types.Operator):
bl_idname = "scene.godot_export_group"
bl_label = "Export Group"
bl_description = "Exports the active group to destination folder as Collada file."
idx = IntProperty(default=0)
export_all = BoolProperty(default=False)
def copy_object_recursive(self,ob,parent,single_user = True):
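        # Recursively duplicate ob and its children under parent and link the copies
        # into the current scene. With single_user (or for armatures) the object data
        # is copied as well, so the duplicate does not share mesh data with the
        # original. convert_group_to_node() below relies on this to turn dupli-group
        # instances into real, exportable objects.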
new_ob = bpy.data.objects[ob.name].copy()
if single_user or ob.type=="ARMATURE":
new_mesh_data = new_ob.data.copy()
new_ob.data = new_mesh_data
bpy.context.scene.objects.link(new_ob)
if ob != parent:
new_ob.parent = parent
else:
new_ob.parent = None
for child in ob.children:
self.copy_object_recursive(child,new_ob,single_user)
new_ob.select = True
return new_ob
def delete_object(self,ob):
if ob != None:
for child in ob.children:
self.delete_object(child)
bpy.context.scene.objects.unlink(ob)
bpy.data.objects.remove(ob)
def convert_group_to_node(self,group):
if group.dupli_group != None:
for object in group.dupli_group.objects:
if object.parent == None:
object = self.copy_object_recursive(object,object,True)
matrix = Matrix(object.matrix_local)
object.matrix_local = Matrix()
object.matrix_local *= group.matrix_local
object.matrix_local *= matrix
self.delete_object(group)
def execute(self,context):
scene = context.scene
group = context.scene.godot_export_groups
if not group[self.idx].active and self.export_all:
return{'FINISHED'}
for i,object in enumerate(group[self.idx].nodes):
if object.name in bpy.data.objects:
pass
else:
group[self.idx].nodes.remove(i)
bpy.ops.ed.undo_push(message="Clear not existent Group Nodes.")
path = group[self.idx].export_path
if (path.find("//")==0 or path.find("\\\\")==0):
#if relative, convert to absolute
path = bpy.path.abspath(path)
path = path.replace("\\","/")
### if path exists and group export name is set the group will be exported
if os.path.exists(path) and group[self.idx].export_name != "":
            context.scene.layers = [True] * 20
if group[self.idx].export_name.endswith(".dae"):
path = os.path.join(path,group[self.idx].export_name)
else:
path = os.path.join(path,group[self.idx].export_name+".dae")
hide_select = []
for object in context.scene.objects:
hide_select.append(object.hide_select)
object.hide_select = False
object.select = False
context.scene.objects.active = None
### make particle duplicates, parent and select them
nodes_to_be_added = []
if group[self.idx].use_include_particle_duplicates:
for i,object in enumerate(group[self.idx].nodes):
if bpy.data.objects[object.name].type != "EMPTY":
context.scene.objects.active = bpy.data.objects[object.name]
bpy.data.objects[object.name].select = True
bpy.ops.object.duplicates_make_real()
for object in context.selected_objects:
nodes_to_be_added.append(object)
bpy.ops.object.parent_set(type="OBJECT", keep_transform=False)
for object in context.selected_objects:
object.select = False
bpy.data.objects[object.name].select = False
context.scene.objects.active = None
for object in nodes_to_be_added:
object.select = True
### select all other nodes from the group
for i,object in enumerate(group[self.idx].nodes):
if bpy.data.objects[object.name].type == "EMPTY":
self.convert_group_to_node(bpy.data.objects[object.name])
else:
bpy.data.objects[object.name].select = True
bpy.ops.object.transform_apply(location=group[self.idx].apply_loc, rotation=group[self.idx].apply_rot, scale=group[self.idx].apply_scale)
            bpy.ops.export_scene.dae(check_existing=True, filepath=path, filter_glob="*.dae",
                object_types=group[self.idx].object_types,
                use_export_selected=group[self.idx].use_export_selected,
                use_mesh_modifiers=group[self.idx].use_mesh_modifiers,
                use_tangent_arrays=group[self.idx].use_tangent_arrays,
                use_triangles=group[self.idx].use_triangles,
                use_copy_images=group[self.idx].use_copy_images,
                use_active_layers=group[self.idx].use_active_layers,
                use_anim=group[self.idx].use_anim,
                use_anim_action_all=group[self.idx].use_anim_action_all,
                use_anim_skip_noexp=group[self.idx].use_anim_skip_noexp,
                use_anim_optimize=group[self.idx].use_anim_optimize,
                anim_optimize_precision=group[self.idx].anim_optimize_precision,
                use_metadata=group[self.idx].use_metadata)
self.report({'INFO'}, '"'+group[self.idx].name+'"' + " Group exported." )
msg = "Export Group "+group[self.idx].name
bpy.ops.ed.undo_push(message="")
bpy.ops.ed.undo()
bpy.ops.ed.undo_push(message=msg)
else:
self.report({'INFO'}, "Define Export Name and Export Path." )
return{'FINISHED'}
class add_export_group(bpy.types.Operator):
bl_idname = "scene.godot_add_export_group"
bl_label = "Adds a new export Group"
bl_description = "Creates a new Export Group with the selected Objects assigned to it."
def execute(self,context):
scene = context.scene
item = scene.godot_export_groups.add()
item.name = "New Group"
for object in context.selected_objects:
node = item.nodes.add()
node.name = object.name
scene.godot_export_groups_index = len(scene.godot_export_groups)-1
bpy.ops.ed.undo_push(message="Create New Export Group")
return{'FINISHED'}
class del_export_group(bpy.types.Operator):
bl_idname = "scene.godot_delete_export_group"
    bl_label = "Deletes the selected export Group"
    bl_description = "Deletes the active Export Group."
def invoke(self, context, event):
wm = context.window_manager
return wm.invoke_confirm(self,event)
def execute(self,context):
scene = context.scene
scene.godot_export_groups.remove(scene.godot_export_groups_index)
if scene.godot_export_groups_index > 0:
scene.godot_export_groups_index -= 1
bpy.ops.ed.undo_push(message="Delete Export Group")
return{'FINISHED'}
class godot_node_list(bpy.types.PropertyGroup):
name = StringProperty()
class godot_export_groups(bpy.types.PropertyGroup):
name = StringProperty(name="Group Name")
export_name = StringProperty(name="scene_name")
nodes = CollectionProperty(type=godot_node_list)
export_path = StringProperty(subtype="DIR_PATH")
active = BoolProperty(default=True,description="Export Group")
object_types = EnumProperty(name="Object Types",options={'ENUM_FLAG'},items=(('EMPTY', "Empty", ""),('CAMERA', "Camera", ""),('LAMP', "Lamp", ""),('ARMATURE', "Armature", ""),('MESH', "Mesh", ""),('CURVE', "Curve", ""),),default={'EMPTY', 'CAMERA', 'LAMP', 'ARMATURE', 'MESH','CURVE'})
apply_scale = BoolProperty(name="Apply Scale",description="Apply Scale before export.",default=False)
apply_rot = BoolProperty(name="Apply Rotation",description="Apply Rotation before export.",default=False)
apply_loc = BoolProperty(name="Apply Location",description="Apply Location before export.",default=False)
use_export_selected = BoolProperty(name="Selected Objects",description="Export only selected objects (and visible in active layers if that applies).",default=True)
use_mesh_modifiers = BoolProperty(name="Apply Modifiers",description="Apply modifiers to mesh objects (on a copy!).",default=True)
use_tangent_arrays = BoolProperty(name="Tangent Arrays",description="Export Tangent and Binormal arrays (for normalmapping).",default=False)
use_triangles = BoolProperty(name="Triangulate",description="Export Triangles instead of Polygons.",default=False)
use_copy_images = BoolProperty(name="Copy Images",description="Copy Images (create images/ subfolder)",default=False)
use_active_layers = BoolProperty(name="Active Layers",description="Export only objects on the active layers.",default=True)
use_anim = BoolProperty(name="Export Animation",description="Export keyframe animation",default=False)
use_anim_action_all = BoolProperty(name="All Actions",description=("Export all actions for the first armature found in separate DAE files"),default=False)
    use_anim_skip_noexp = BoolProperty(name="Skip (-noexp) Actions",description="Skip exporting of actions whose names end in (-noexp). Useful to skip control animations.",default=True)
use_anim_optimize = BoolProperty(name="Optimize Keyframes",description="Remove double keyframes",default=True)
    anim_optimize_precision = FloatProperty(name="Precision",description=("Tolerance for comparing double keyframes (higher for greater accuracy)"),min=1, max=16,soft_min=1, soft_max=16,default=6.0)
use_metadata = BoolProperty(name="Use Metadata",default=True,options={'HIDDEN'})
use_include_particle_duplicates = BoolProperty(name="Include Particle Duplicates",default=True)
def register():
bpy.utils.register_class(godot_export_manager)
bpy.utils.register_class(godot_node_list)
bpy.utils.register_class(godot_export_groups)
bpy.utils.register_class(add_export_group)
bpy.utils.register_class(del_export_group)
bpy.utils.register_class(export_all_groups)
bpy.utils.register_class(export_groups_autosave)
bpy.utils.register_class(export_group)
bpy.utils.register_class(add_objects_to_group)
bpy.utils.register_class(del_objects_from_group)
bpy.utils.register_class(select_group_objects)
bpy.utils.register_class(UI_List_Godot)
bpy.types.Scene.godot_export_groups = CollectionProperty(type=godot_export_groups)
bpy.types.Scene.godot_export_groups_index = IntProperty(default=0,min=0)
def unregister():
bpy.utils.unregister_class(godot_export_manager)
bpy.utils.unregister_class(godot_node_list)
bpy.utils.unregister_class(godot_export_groups)
bpy.utils.unregister_class(export_groups_autosave)
bpy.utils.unregister_class(add_export_group)
bpy.utils.unregister_class(del_export_group)
bpy.utils.unregister_class(export_all_groups)
bpy.utils.unregister_class(export_group)
bpy.utils.unregister_class(add_objects_to_group)
bpy.utils.unregister_class(del_objects_from_group)
bpy.utils.unregister_class(select_group_objects)
bpy.utils.unregister_class(UI_List_Godot)
@persistent
def auto_export(dummy):
bpy.ops.scene.godot_export_groups_autosave()
bpy.app.handlers.save_post.append(auto_export)
if __name__ == "__main__":
register()
| mit |
ProgVal/cjdns | node_build/dependencies/libuv/build/gyp/test/copies/gyptest-default.py | 100 | 1347 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies file copies using the build tool default.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('copies.gyp',
'-G', 'xcode_ninja_target_pattern=^(?!copies_null)',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('copies.gyp', chdir='relocate/src')
test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n')
test.built_file_must_match('copies-out/file2',
'file2 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out/directory/file3',
'file3 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out/directory/file4',
'file4 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out/directory/subdir/file5',
'file5 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out/subdir/file6',
'file6 contents\n',
chdir='relocate/src')
test.pass_test()
| gpl-3.0 |
marcusramberg/dotfiles | bin/.venv-ansible-venv/lib/python2.6/site-packages/ansible/modules/extras/cloud/misc/ovirt.py | 32 | 14190 | #!/usr/bin/python
# (c) 2013, Vincent Van der Kussen <vincent at vanderkussen.org>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ovirt
author: Vincent Van der Kussen
short_description: oVirt/RHEV platform management
description:
- allows you to create new instances, either from scratch or an image, in addition to deleting or stopping instances on the oVirt/RHEV platform
version_added: "1.4"
options:
user:
description:
- the user to authenticate with
default: null
required: true
aliases: []
url:
description:
- the url of the oVirt instance
default: null
required: true
aliases: []
instance_name:
description:
- the name of the instance to use
default: null
required: true
aliases: [ vmname ]
password:
description:
- password of the user to authenticate with
default: null
required: true
aliases: []
image:
description:
- template to use for the instance
default: null
required: false
aliases: []
resource_type:
description:
- whether you want to deploy an image or create an instance from scratch.
default: null
required: false
aliases: []
choices: [ 'new', 'template' ]
zone:
description:
- deploy the image to this oVirt cluster
default: null
required: false
aliases: []
instance_disksize:
description:
- size of the instance's disk in GB
default: null
required: false
aliases: [ vm_disksize]
instance_cpus:
description:
- the instance's number of cpu's
default: 1
required: false
aliases: [ vmcpus ]
instance_nic:
description:
- name of the network interface in oVirt/RHEV
default: null
required: false
aliases: [ vmnic ]
instance_network:
description:
- the logical network the machine should belong to
default: rhevm
required: false
aliases: [ vmnetwork ]
instance_mem:
description:
- the instance's amount of memory in MB
default: null
required: false
aliases: [ vmmem ]
instance_type:
description:
- define if the instance is a server or desktop
default: server
required: false
aliases: [ vmtype ]
choices: [ 'server', 'desktop' ]
disk_alloc:
description:
- define if disk is thin or preallocated
default: thin
required: false
aliases: []
choices: [ 'thin', 'preallocated' ]
disk_int:
description:
- interface type of the disk
default: virtio
required: false
aliases: []
choices: [ 'virtio', 'ide' ]
instance_os:
description:
- type of Operating System
default: null
required: false
aliases: [ vmos ]
instance_cores:
description:
- define the instance's number of cores
default: 1
required: false
aliases: [ vmcores ]
sdomain:
description:
- the Storage Domain where you want to create the instance's disk on.
default: null
required: false
aliases: []
region:
description:
- the oVirt/RHEV datacenter where you want to deploy to
default: null
required: false
aliases: []
state:
description:
- create, terminate or remove instances
default: 'present'
required: false
aliases: []
choices: ['present', 'absent', 'shutdown', 'started', 'restarted']
requirements: [ "ovirt-engine-sdk" ]
'''
EXAMPLES = '''
# Basic example provisioning from image.
action: ovirt >
user=admin@internal
url=https://ovirt.example.com
instance_name=ansiblevm04
password=secret
image=centos_64
zone=cluster01
    resource_type=template
# Full example to create new instance from scratch
action: ovirt >
instance_name=testansible
resource_type=new
instance_type=server
user=admin@internal
password=secret
url=https://ovirt.example.com
instance_disksize=10
zone=cluster01
region=datacenter1
instance_cpus=1
instance_nic=nic1
instance_network=rhevm
instance_mem=1000
disk_alloc=thin
sdomain=FIBER01
instance_cores=1
instance_os=rhel_6x64
    disk_int=virtio
# stopping an instance
action: ovirt >
instance_name=testansible
    state=shutdown
user=admin@internal
password=secret
url=https://ovirt.example.com
# starting an instance
action: ovirt >
instance_name=testansible
state=started
user=admin@internal
password=secret
url=https://ovirt.example.com
'''
import sys
import time
try:
from ovirtsdk.api import API
from ovirtsdk.xml import params
except ImportError:
print "failed=True msg='ovirtsdk required for this module'"
sys.exit(1)
# ------------------------------------------------------------------- #
# create connection with API
#
def conn(url, user, password):
api = API(url=url, username=user, password=password, insecure=True)
try:
value = api.test()
except:
print "error connecting to the oVirt API"
sys.exit(1)
return api
# ------------------------------------------------------------------- #
# Create VM from scratch
def create_vm(conn, vmtype, vmname, zone, vmdisk_size, vmcpus, vmnic, vmnetwork, vmmem, vmdisk_alloc, sdomain, vmcores, vmos, vmdisk_int):
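    # Thin-provisioned disks are created sparse in COW format; preallocated
    # disks are created non-sparse in raw format.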
if vmdisk_alloc == 'thin':
# define VM params
vmparams = params.VM(name=vmname,cluster=conn.clusters.get(name=zone),os=params.OperatingSystem(type_=vmos),template=conn.templates.get(name="Blank"),memory=1024 * 1024 * int(vmmem),cpu=params.CPU(topology=params.CpuTopology(cores=int(vmcores))), type_=vmtype)
# define disk params
vmdisk= params.Disk(size=1024 * 1024 * 1024 * int(vmdisk_size), wipe_after_delete=True, sparse=True, interface=vmdisk_int, type_="System", format='cow',
storage_domains=params.StorageDomains(storage_domain=[conn.storagedomains.get(name=sdomain)]))
# define network parameters
network_net = params.Network(name=vmnetwork)
nic_net1 = params.NIC(name='nic1', network=network_net, interface='virtio')
elif vmdisk_alloc == 'preallocated':
# define VM params
vmparams = params.VM(name=vmname,cluster=conn.clusters.get(name=zone),os=params.OperatingSystem(type_=vmos),template=conn.templates.get(name="Blank"),memory=1024 * 1024 * int(vmmem),cpu=params.CPU(topology=params.CpuTopology(cores=int(vmcores))) ,type_=vmtype)
# define disk params
vmdisk= params.Disk(size=1024 * 1024 * 1024 * int(vmdisk_size), wipe_after_delete=True, sparse=False, interface=vmdisk_int, type_="System", format='raw',
storage_domains=params.StorageDomains(storage_domain=[conn.storagedomains.get(name=sdomain)]))
# define network parameters
network_net = params.Network(name=vmnetwork)
nic_net1 = params.NIC(name=vmnic, network=network_net, interface='virtio')
try:
conn.vms.add(vmparams)
except:
print "Error creating VM with specified parameters"
sys.exit(1)
vm = conn.vms.get(name=vmname)
try:
vm.disks.add(vmdisk)
except:
print "Error attaching disk"
try:
vm.nics.add(nic_net1)
except:
print "Error adding nic"
# create an instance from a template
def create_vm_template(conn, vmname, image, zone):
vmparams = params.VM(name=vmname, cluster=conn.clusters.get(name=zone), template=conn.templates.get(name=image),disks=params.Disks(clone=True))
try:
conn.vms.add(vmparams)
except:
print 'error adding template %s' % image
sys.exit(1)
# start instance
def vm_start(conn, vmname):
vm = conn.vms.get(name=vmname)
vm.start()
# Stop instance
def vm_stop(conn, vmname):
vm = conn.vms.get(name=vmname)
vm.stop()
# restart instance
def vm_restart(conn, vmname):
state = vm_status(conn, vmname)
vm = conn.vms.get(name=vmname)
vm.stop()
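    # Poll until the VM is reported as down before starting it again.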
while conn.vms.get(vmname).get_status().get_state() != 'down':
time.sleep(5)
vm.start()
# remove an instance
def vm_remove(conn, vmname):
vm = conn.vms.get(name=vmname)
vm.delete()
# ------------------------------------------------------------------- #
# VM statuses
#
# Get the VMs status
def vm_status(conn, vmname):
status = conn.vms.get(name=vmname).status.state
print "vm status is : %s" % status
return status
# Get VM object and return it's name if object exists
def get_vm(conn, vmname):
vm = conn.vms.get(name=vmname)
    if vm is None:
name = "empty"
print "vmname: %s" % name
else:
name = vm.get_name()
print "vmname: %s" % name
return name
# ------------------------------------------------------------------- #
# Hypervisor operations
#
# not available yet
# ------------------------------------------------------------------- #
# Main
def main():
module = AnsibleModule(
argument_spec = dict(
            state = dict(default='present', choices=['present', 'absent', 'shutdown', 'started', 'restarted']),
#name = dict(required=True),
user = dict(required=True),
url = dict(required=True),
instance_name = dict(required=True, aliases=['vmname']),
password = dict(required=True),
image = dict(),
resource_type = dict(choices=['new', 'template']),
zone = dict(),
instance_disksize = dict(aliases=['vm_disksize']),
instance_cpus = dict(default=1, aliases=['vmcpus']),
instance_nic = dict(aliases=['vmnic']),
instance_network = dict(default='rhevm', aliases=['vmnetwork']),
instance_mem = dict(aliases=['vmmem']),
instance_type = dict(default='server', aliases=['vmtype'], choices=['server', 'desktop']),
disk_alloc = dict(default='thin', choices=['thin', 'preallocated']),
disk_int = dict(default='virtio', choices=['virtio', 'ide']),
instance_os = dict(aliases=['vmos']),
instance_cores = dict(default=1, aliases=['vmcores']),
sdomain = dict(),
region = dict(),
)
)
state = module.params['state']
user = module.params['user']
url = module.params['url']
vmname = module.params['instance_name']
password = module.params['password']
image = module.params['image'] # name of the image to deploy
resource_type = module.params['resource_type'] # template or from scratch
zone = module.params['zone'] # oVirt cluster
vmdisk_size = module.params['instance_disksize'] # disksize
vmcpus = module.params['instance_cpus'] # number of cpu
vmnic = module.params['instance_nic'] # network interface
vmnetwork = module.params['instance_network'] # logical network
vmmem = module.params['instance_mem'] # mem size
vmdisk_alloc = module.params['disk_alloc'] # thin, preallocated
vmdisk_int = module.params['disk_int'] # disk interface virtio or ide
vmos = module.params['instance_os'] # Operating System
vmtype = module.params['instance_type'] # server or desktop
vmcores = module.params['instance_cores'] # number of cores
sdomain = module.params['sdomain'] # storage domain to store disk on
region = module.params['region'] # oVirt Datacenter
#initialize connection
c = conn(url+"/api", user, password)
if state == 'present':
if get_vm(c, vmname) == "empty":
if resource_type == 'template':
create_vm_template(c, vmname, image, zone)
module.exit_json(changed=True, msg="deployed VM %s from template %s" % (vmname,image))
elif resource_type == 'new':
# FIXME: refactor, use keyword args.
create_vm(c, vmtype, vmname, zone, vmdisk_size, vmcpus, vmnic, vmnetwork, vmmem, vmdisk_alloc, sdomain, vmcores, vmos, vmdisk_int)
module.exit_json(changed=True, msg="deployed VM %s from scratch" % vmname)
else:
module.exit_json(changed=False, msg="You did not specify a resource type")
else:
module.exit_json(changed=False, msg="VM %s already exists" % vmname)
if state == 'started':
if vm_status(c, vmname) == 'up':
module.exit_json(changed=False, msg="VM %s is already running" % vmname)
else:
vm_start(c, vmname)
module.exit_json(changed=True, msg="VM %s started" % vmname)
if state == 'shutdown':
if vm_status(c, vmname) == 'down':
module.exit_json(changed=False, msg="VM %s is already shutdown" % vmname)
else:
vm_stop(c, vmname)
module.exit_json(changed=True, msg="VM %s is shutting down" % vmname)
    if state == 'restarted':
if vm_status(c, vmname) == 'up':
vm_restart(c, vmname)
module.exit_json(changed=True, msg="VM %s is restarted" % vmname)
else:
module.exit_json(changed=False, msg="VM %s is not running" % vmname)
if state == 'absent':
if get_vm(c, vmname) == "empty":
module.exit_json(changed=False, msg="VM %s does not exist" % vmname)
else:
vm_remove(c, vmname)
module.exit_json(changed=True, msg="VM %s removed" % vmname)
# import module snippets
from ansible.module_utils.basic import *
main()
| mit |
savoirfairelinux/OpenUpgrade | addons/auth_ldap/users_ldap.py | 38 | 10550 | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import ldap
import logging
from ldap.filter import filter_format
import openerp.exceptions
from openerp import tools
from openerp.osv import fields, osv
from openerp import SUPERUSER_ID
from openerp.modules.registry import RegistryManager
_logger = logging.getLogger(__name__)
class CompanyLDAP(osv.osv):
_name = 'res.company.ldap'
_order = 'sequence'
_rec_name = 'ldap_server'
def get_ldap_dicts(self, cr, ids=None):
"""
Retrieve res_company_ldap resources from the database in dictionary
format.
:param list ids: Valid ids of model res_company_ldap. If not \
specified, process all resources (unlike other ORM methods).
:return: ldap configurations
:rtype: list of dictionaries
"""
if ids:
id_clause = 'AND id IN (%s)'
args = [tuple(ids)]
else:
id_clause = ''
args = []
cr.execute("""
SELECT id, company, ldap_server, ldap_server_port, ldap_binddn,
ldap_password, ldap_filter, ldap_base, "user", create_user,
ldap_tls
FROM res_company_ldap
WHERE ldap_server != '' """ + id_clause + """ ORDER BY sequence
""", args)
return cr.dictfetchall()
def connect(self, conf):
"""
Connect to an LDAP server specified by an ldap
configuration dictionary.
:param dict conf: LDAP configuration
:return: an LDAP object
"""
uri = 'ldap://%s:%d' % (conf['ldap_server'],
conf['ldap_server_port'])
connection = ldap.initialize(uri)
if conf['ldap_tls']:
connection.start_tls_s()
return connection
def authenticate(self, conf, login, password):
"""
Authenticate a user against the specified LDAP server.
In order to prevent an unintended 'unauthenticated authentication',
which is an anonymous bind with a valid dn and a blank password,
        check for empty passwords explicitly (:rfc:`4513#section-6.3.1`)
:param dict conf: LDAP configuration
:param login: username
:param password: Password for the LDAP user
:return: LDAP entry of authenticated user or False
:rtype: dictionary of attributes
"""
if not password:
return False
entry = False
filter = filter_format(conf['ldap_filter'], (login,))
try:
results = self.query(conf, filter)
# Get rid of (None, attrs) for searchResultReference replies
results = [i for i in results if i[0]]
if results and len(results) == 1:
dn = results[0][0]
conn = self.connect(conf)
conn.simple_bind_s(dn, password)
conn.unbind()
entry = results[0]
except ldap.INVALID_CREDENTIALS:
return False
except ldap.LDAPError, e:
_logger.error('An LDAP exception occurred: %s', e)
return entry
def query(self, conf, filter, retrieve_attributes=None):
"""
Query an LDAP server with the filter argument and scope subtree.
Allow for all authentication methods of the simple authentication
method:
- authenticated bind (non-empty binddn + valid password)
- anonymous bind (empty binddn + empty password)
- unauthenticated authentication (non-empty binddn + empty password)
.. seealso::
:rfc:`4513#section-5.1` - LDAP: Simple Authentication Method.
:param dict conf: LDAP configuration
:param filter: valid LDAP filter
:param list retrieve_attributes: LDAP attributes to be retrieved. \
If not specified, return all attributes.
:return: ldap entries
:rtype: list of tuples (dn, attrs)
"""
results = []
try:
conn = self.connect(conf)
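            # Empty binddn/password values are passed through as '', which yields
            # the anonymous or unauthenticated bind variants described above.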
conn.simple_bind_s(conf['ldap_binddn'] or '',
conf['ldap_password'] or '')
results = conn.search_st(conf['ldap_base'], ldap.SCOPE_SUBTREE,
filter, retrieve_attributes, timeout=60)
conn.unbind()
except ldap.INVALID_CREDENTIALS:
_logger.error('LDAP bind failed.')
except ldap.LDAPError, e:
_logger.error('An LDAP exception occurred: %s', e)
return results
def map_ldap_attributes(self, cr, uid, conf, login, ldap_entry):
"""
Compose values for a new resource of model res_users,
based upon the retrieved ldap entry and the LDAP settings.
:param dict conf: LDAP configuration
:param login: the new user's login
:param tuple ldap_entry: single LDAP result (dn, attrs)
:return: parameters for a new resource of model res_users
:rtype: dict
"""
values = { 'name': ldap_entry[1]['cn'][0],
'login': login,
'company_id': conf['company']
}
return values
def get_or_create_user(self, cr, uid, conf, login, ldap_entry,
context=None):
"""
Retrieve an active resource of model res_users with the specified
login. Create the user if it is not initially found.
:param dict conf: LDAP configuration
:param login: the user's login
:param tuple ldap_entry: single LDAP result (dn, attrs)
:return: res_users id
:rtype: int
"""
user_id = False
login = tools.ustr(login.lower())
cr.execute("SELECT id, active FROM res_users WHERE lower(login)=%s", (login,))
res = cr.fetchone()
if res:
if res[1]:
user_id = res[0]
elif conf['create_user']:
_logger.debug("Creating new OpenERP user \"%s\" from LDAP" % login)
user_obj = self.pool['res.users']
values = self.map_ldap_attributes(cr, uid, conf, login, ldap_entry)
if conf['user']:
values['active'] = True
user_id = user_obj.copy(cr, SUPERUSER_ID, conf['user'],
default=values)
else:
user_id = user_obj.create(cr, SUPERUSER_ID, values)
return user_id
_columns = {
'sequence': fields.integer('Sequence'),
'company': fields.many2one('res.company', 'Company', required=True,
ondelete='cascade'),
'ldap_server': fields.char('LDAP Server address', size=64, required=True),
'ldap_server_port': fields.integer('LDAP Server port', required=True),
'ldap_binddn': fields.char('LDAP binddn', size=64,
help=("The user account on the LDAP server that is used to query "
"the directory. Leave empty to connect anonymously.")),
'ldap_password': fields.char('LDAP password', size=64,
help=("The password of the user account on the LDAP server that is "
"used to query the directory.")),
'ldap_filter': fields.char('LDAP filter', size=256, required=True),
'ldap_base': fields.char('LDAP base', size=64, required=True),
'user': fields.many2one('res.users', 'Template User',
help="User to copy when creating new users"),
'create_user': fields.boolean('Create user',
help="Automatically create local user accounts for new users authenticating via LDAP"),
'ldap_tls': fields.boolean('Use TLS',
help="Request secure TLS/SSL encryption when connecting to the LDAP server. "
"This option requires a server with STARTTLS enabled, "
"otherwise all authentication attempts will fail."),
}
_defaults = {
'ldap_server': '127.0.0.1',
'ldap_server_port': 389,
'sequence': 10,
'create_user': True,
}
class res_company(osv.osv):
_inherit = "res.company"
_columns = {
'ldaps': fields.one2many(
'res.company.ldap', 'company', 'LDAP Parameters'),
}
class users(osv.osv):
_inherit = "res.users"
def login(self, db, login, password):
user_id = super(users, self).login(db, login, password)
if user_id:
return user_id
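        # Standard password authentication failed; fall back to the configured
        # LDAP servers and, if enabled, create the matching local user account.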
registry = RegistryManager.get(db)
with registry.cursor() as cr:
ldap_obj = registry.get('res.company.ldap')
for conf in ldap_obj.get_ldap_dicts(cr):
entry = ldap_obj.authenticate(conf, login, password)
if entry:
user_id = ldap_obj.get_or_create_user(
cr, SUPERUSER_ID, conf, login, entry)
if user_id:
break
return user_id
def check_credentials(self, cr, uid, password):
try:
super(users, self).check_credentials(cr, uid, password)
except openerp.exceptions.AccessDenied:
cr.execute('SELECT login FROM res_users WHERE id=%s AND active=TRUE',
(int(uid),))
res = cr.fetchone()
if res:
ldap_obj = self.pool['res.company.ldap']
for conf in ldap_obj.get_ldap_dicts(cr):
if ldap_obj.authenticate(conf, res[0], password):
return
raise
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
wangyum/tensorflow | tensorflow/contrib/saved_model/python/saved_model/signature_def_utils_test.py | 64 | 8192 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for SignatureDef utils."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.saved_model.python.saved_model import signature_def_utils as signature_def_contrib_utils
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import signature_def_utils
from tensorflow.python.saved_model import utils
class SignatureDefUtilsTest(test.TestCase):
def _add_to_signature_def_map(self, meta_graph_def, signature_def_map=None):
if signature_def_map is not None:
for key in signature_def_map:
meta_graph_def.signature_def[key].CopyFrom(signature_def_map[key])
def _check_tensor_info(self, tensor_info_map, map_key, expected_tensor_name):
actual_tensor_info = tensor_info_map[map_key]
self.assertEqual(expected_tensor_name, actual_tensor_info.name)
def testGetSignatureDefByKey(self):
x = array_ops.placeholder(dtypes.float32, 1, name="x")
x_tensor_info = utils.build_tensor_info(x)
y = array_ops.placeholder(dtypes.float32, name="y")
y_tensor_info = utils.build_tensor_info(y)
foo_signature_def = signature_def_utils.build_signature_def({
"foo-input": x_tensor_info
}, {"foo-output": y_tensor_info}, "foo-method-name")
bar_signature_def = signature_def_utils.build_signature_def({
"bar-input": x_tensor_info
}, {"bar-output": y_tensor_info}, "bar-method-name")
meta_graph_def = meta_graph_pb2.MetaGraphDef()
self._add_to_signature_def_map(
meta_graph_def, {"foo": foo_signature_def,
"bar": bar_signature_def})
# Look up a key that does not exist in the SignatureDefMap.
missing_key = "missing-key"
with self.assertRaisesRegexp(
ValueError,
"No SignatureDef with key '%s' found in MetaGraphDef" % missing_key):
signature_def_contrib_utils.get_signature_def_by_key(
meta_graph_def, missing_key)
# Look up the key, `foo` which exists in the SignatureDefMap.
foo_signature_def = signature_def_contrib_utils.get_signature_def_by_key(
meta_graph_def, "foo")
self.assertTrue("foo-method-name", foo_signature_def.method_name)
# Check inputs in signature def.
self.assertEqual(1, len(foo_signature_def.inputs))
self._check_tensor_info(foo_signature_def.inputs, "foo-input", "x:0")
# Check outputs in signature def.
self.assertEqual(1, len(foo_signature_def.outputs))
self._check_tensor_info(foo_signature_def.outputs, "foo-output", "y:0")
# Look up the key, `bar` which exists in the SignatureDefMap.
bar_signature_def = signature_def_contrib_utils.get_signature_def_by_key(
meta_graph_def, "bar")
self.assertTrue("bar-method-name", bar_signature_def.method_name)
# Check inputs in signature def.
self.assertEqual(1, len(bar_signature_def.inputs))
self._check_tensor_info(bar_signature_def.inputs, "bar-input", "x:0")
# Check outputs in signature def.
self.assertEqual(1, len(bar_signature_def.outputs))
self._check_tensor_info(bar_signature_def.outputs, "bar-output", "y:0")
def testGetSignatureDefByKeyRegression(self):
input1 = constant_op.constant("a", name="input-1")
output1 = constant_op.constant("b", name="output-1")
meta_graph_def = meta_graph_pb2.MetaGraphDef()
self._add_to_signature_def_map(meta_graph_def, {
"my_regression":
signature_def_utils.regression_signature_def(input1, output1)
})
# Look up the regression signature with the key used while saving.
signature_def = signature_def_contrib_utils.get_signature_def_by_key(
meta_graph_def, "my_regression")
# Check the method name to match the constants regression method name.
self.assertEqual(signature_constants.REGRESS_METHOD_NAME,
signature_def.method_name)
# Check inputs in signature def.
self.assertEqual(1, len(signature_def.inputs))
self._check_tensor_info(signature_def.inputs,
signature_constants.REGRESS_INPUTS, "input-1:0")
# Check outputs in signature def.
self.assertEqual(1, len(signature_def.outputs))
self._check_tensor_info(signature_def.outputs,
signature_constants.REGRESS_OUTPUTS, "output-1:0")
def testGetSignatureDefByKeyClassification(self):
input1 = constant_op.constant("a", name="input-1")
output1 = constant_op.constant("b", name="output-1")
output2 = constant_op.constant("c", name="output-2")
meta_graph_def = meta_graph_pb2.MetaGraphDef()
self._add_to_signature_def_map(meta_graph_def, {
"my_classification":
signature_def_utils.classification_signature_def(
input1, output1, output2)
})
# Look up the classification signature def with the key used while saving.
signature_def = signature_def_contrib_utils.get_signature_def_by_key(
meta_graph_def, "my_classification")
# Check the method name to match the constants classification method name.
self.assertEqual(signature_constants.CLASSIFY_METHOD_NAME,
signature_def.method_name)
# Check inputs in signature def.
self.assertEqual(1, len(signature_def.inputs))
self._check_tensor_info(signature_def.inputs,
signature_constants.CLASSIFY_INPUTS, "input-1:0")
# Check outputs in signature def.
self.assertEqual(2, len(signature_def.outputs))
self._check_tensor_info(signature_def.outputs,
signature_constants.CLASSIFY_OUTPUT_CLASSES,
"output-1:0")
self._check_tensor_info(signature_def.outputs,
signature_constants.CLASSIFY_OUTPUT_SCORES,
"output-2:0")
def testPredictionSignatureDef(self):
input1 = constant_op.constant("a", name="input-1")
input2 = constant_op.constant("b", name="input-2")
output1 = constant_op.constant("c", name="output-1")
output2 = constant_op.constant("d", name="output-2")
meta_graph_def = meta_graph_pb2.MetaGraphDef()
self._add_to_signature_def_map(meta_graph_def, {
"my_prediction":
signature_def_utils.predict_signature_def({
"input-1": input1,
"input-2": input2
}, {"output-1": output1,
"output-2": output2})
})
# Look up the prediction signature def with the key used while saving.
signature_def = signature_def_contrib_utils.get_signature_def_by_key(
meta_graph_def, "my_prediction")
self.assertEqual(signature_constants.PREDICT_METHOD_NAME,
signature_def.method_name)
# Check inputs in signature def.
self.assertEqual(2, len(signature_def.inputs))
self._check_tensor_info(signature_def.inputs, "input-1", "input-1:0")
self._check_tensor_info(signature_def.inputs, "input-2", "input-2:0")
# Check outputs in signature def.
self.assertEqual(2, len(signature_def.outputs))
self._check_tensor_info(signature_def.outputs, "output-1", "output-1:0")
self._check_tensor_info(signature_def.outputs, "output-2", "output-2:0")
if __name__ == "__main__":
test.main()
| apache-2.0 |
jakevdp/lombscargle | ez_setup.py | 164 | 12155 | #!python
"""Bootstrap setuptools installation
If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::
from ez_setup import use_setuptools
use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
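For example (the mirror URL and target directory below are only placeholders)::
    use_setuptools(version="1.4.2",
                   download_base="https://mirror.example.com/setuptools/",
                   to_dir="vendor", download_delay=0)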
This file can also be run as a script to install or upgrade setuptools.
"""
import os
import shutil
import sys
import tempfile
import tarfile
import optparse
import subprocess
import platform
from distutils import log
try:
from site import USER_SITE
except ImportError:
USER_SITE = None
DEFAULT_VERSION = "1.4.2"
DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
def _python_cmd(*args):
args = (sys.executable,) + args
return subprocess.call(args) == 0
def _check_call_py24(cmd, *args, **kwargs):
res = subprocess.call(cmd, *args, **kwargs)
class CalledProcessError(Exception):
pass
if not res == 0:
msg = "Command '%s' return non-zero exit status %d" % (cmd, res)
raise CalledProcessError(msg)
vars(subprocess).setdefault('check_call', _check_call_py24)
def _install(tarball, install_args=()):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# installing
log.warn('Installing Setuptools')
if not _python_cmd('setup.py', 'install', *install_args):
log.warn('Something went wrong during the installation.')
log.warn('See the error message above.')
# exitcode will be 2
return 2
finally:
os.chdir(old_wd)
shutil.rmtree(tmpdir)
def _build_egg(egg, tarball, to_dir):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# building an egg
log.warn('Building a Setuptools egg in %s', to_dir)
_python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
finally:
os.chdir(old_wd)
shutil.rmtree(tmpdir)
# returning the result
log.warn(egg)
if not os.path.exists(egg):
raise IOError('Could not build the egg.')
def _do_download(version, download_base, to_dir, download_delay):
egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg'
% (version, sys.version_info[0], sys.version_info[1]))
if not os.path.exists(egg):
tarball = download_setuptools(version, download_base,
to_dir, download_delay)
_build_egg(egg, tarball, to_dir)
sys.path.insert(0, egg)
# Remove previously-imported pkg_resources if present (see
# https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
if 'pkg_resources' in sys.modules:
del sys.modules['pkg_resources']
import setuptools
setuptools.bootstrap_install_from = egg
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, download_delay=15):
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
was_imported = 'pkg_resources' in sys.modules or \
'setuptools' in sys.modules
try:
import pkg_resources
except ImportError:
return _do_download(version, download_base, to_dir, download_delay)
try:
pkg_resources.require("setuptools>=" + version)
return
except pkg_resources.VersionConflict:
e = sys.exc_info()[1]
if was_imported:
sys.stderr.write(
"The required version of setuptools (>=%s) is not available,\n"
"and can't be installed while this script is running. Please\n"
"install a more recent version first, using\n"
"'easy_install -U setuptools'."
"\n\n(Currently using %r)\n" % (version, e.args[0]))
sys.exit(2)
else:
del pkg_resources, sys.modules['pkg_resources'] # reload ok
return _do_download(version, download_base, to_dir,
download_delay)
except pkg_resources.DistributionNotFound:
return _do_download(version, download_base, to_dir,
download_delay)
def _clean_check(cmd, target):
"""
Run the command to download target. If the command fails, clean up before
re-raising the error.
"""
try:
subprocess.check_call(cmd)
except subprocess.CalledProcessError:
if os.access(target, os.F_OK):
os.unlink(target)
raise
def download_file_powershell(url, target):
"""
Download the file at url to target using Powershell (which will validate
trust). Raise an exception if the command cannot complete.
"""
target = os.path.abspath(target)
cmd = [
'powershell',
'-Command',
"(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(),
]
_clean_check(cmd, target)
def has_powershell():
if platform.system() != 'Windows':
return False
cmd = ['powershell', '-Command', 'echo test']
devnull = open(os.path.devnull, 'wb')
try:
try:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
except:
return False
finally:
devnull.close()
return True
download_file_powershell.viable = has_powershell
def download_file_curl(url, target):
cmd = ['curl', url, '--silent', '--output', target]
_clean_check(cmd, target)
def has_curl():
cmd = ['curl', '--version']
devnull = open(os.path.devnull, 'wb')
try:
try:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
except:
return False
finally:
devnull.close()
return True
download_file_curl.viable = has_curl
def download_file_wget(url, target):
cmd = ['wget', url, '--quiet', '--output-document', target]
_clean_check(cmd, target)
def has_wget():
cmd = ['wget', '--version']
devnull = open(os.path.devnull, 'wb')
try:
try:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
except:
return False
finally:
devnull.close()
return True
download_file_wget.viable = has_wget
def download_file_insecure(url, target):
"""
Use Python to download the file, even though it cannot authenticate the
connection.
"""
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
src = dst = None
try:
src = urlopen(url)
# Read/write all in one block, so we don't create a corrupt file
# if the download is interrupted.
data = src.read()
dst = open(target, "wb")
dst.write(data)
finally:
if src:
src.close()
if dst:
dst.close()
download_file_insecure.viable = lambda: True
def get_best_downloader():
downloaders = [
download_file_powershell,
download_file_curl,
download_file_wget,
download_file_insecure,
]
for dl in downloaders:
if dl.viable():
return dl
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, delay=15,
downloader_factory=get_best_downloader):
"""Download setuptools from a specified location and return its filename
`version` should be a valid setuptools version number that is available
as an egg for download under the `download_base` URL (which should end
with a '/'). `to_dir` is the directory where the egg will be downloaded.
`delay` is the number of seconds to pause before an actual download
attempt.
``downloader_factory`` should be a function taking no arguments and
returning a function for downloading a URL to a target.
"""
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
tgz_name = "setuptools-%s.tar.gz" % version
url = download_base + tgz_name
saveto = os.path.join(to_dir, tgz_name)
if not os.path.exists(saveto): # Avoid repeated downloads
log.warn("Downloading %s", url)
downloader = downloader_factory()
downloader(url, saveto)
return os.path.realpath(saveto)
def _extractall(self, path=".", members=None):
"""Extract all members from the archive to the current working
directory and set owner, modification time and permissions on
directories afterwards. `path' specifies a different directory
to extract to. `members' is optional and must be a subset of the
list returned by getmembers().
"""
import copy
import operator
from tarfile import ExtractError
directories = []
if members is None:
members = self
for tarinfo in members:
if tarinfo.isdir():
# Extract directories with a safe mode.
directories.append(tarinfo)
tarinfo = copy.copy(tarinfo)
tarinfo.mode = 448 # decimal for oct 0700
self.extract(tarinfo, path)
# Reverse sort directories.
if sys.version_info < (2, 4):
def sorter(dir1, dir2):
return cmp(dir1.name, dir2.name)
directories.sort(sorter)
directories.reverse()
else:
directories.sort(key=operator.attrgetter('name'), reverse=True)
# Set correct owner, mtime and filemode on directories.
for tarinfo in directories:
dirpath = os.path.join(path, tarinfo.name)
try:
self.chown(tarinfo, dirpath)
self.utime(tarinfo, dirpath)
self.chmod(tarinfo, dirpath)
except ExtractError:
e = sys.exc_info()[1]
if self.errorlevel > 1:
raise
else:
self._dbg(1, "tarfile: %s" % e)
def _build_install_args(options):
"""
Build the arguments to 'python setup.py install' on the setuptools package
"""
install_args = []
if options.user_install:
if sys.version_info < (2, 6):
log.warn("--user requires Python 2.6 or later")
raise SystemExit(1)
install_args.append('--user')
return install_args
def _parse_args():
"""
Parse the command line for options
"""
parser = optparse.OptionParser()
parser.add_option(
'--user', dest='user_install', action='store_true', default=False,
help='install in user site package (requires Python 2.6 or later)')
parser.add_option(
'--download-base', dest='download_base', metavar="URL",
default=DEFAULT_URL,
help='alternative URL from where to download the setuptools package')
parser.add_option(
'--insecure', dest='downloader_factory', action='store_const',
const=lambda: download_file_insecure, default=get_best_downloader,
help='Use internal, non-validating downloader'
)
options, args = parser.parse_args()
# positional arguments are ignored
return options
def main(version=DEFAULT_VERSION):
"""Install or upgrade setuptools and EasyInstall"""
options = _parse_args()
tarball = download_setuptools(download_base=options.download_base,
downloader_factory=options.downloader_factory)
return _install(tarball, _build_install_args(options))
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause |
jorik041/phantomjs | src/breakpad/src/tools/gyp/test/defines/gyptest-defines-env-regyp.py | 151 | 1312 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies build of an executable with C++ define specified by a gyp define, and
the use of the environment during regeneration when the gyp file changes.
"""
import os
import TestGyp
# Regenerating build files when a gyp file changes is currently only supported
# by the make generator.
test = TestGyp.TestGyp(formats=['make'])
try:
os.environ['GYP_DEFINES'] = 'value=50'
test.run_gyp('defines.gyp')
finally:
# We clear the environ after calling gyp. When the auto-regeneration happens,
# the same define should be reused anyway. Reset to empty string first in
# case the platform doesn't support unsetenv.
os.environ['GYP_DEFINES'] = ''
del os.environ['GYP_DEFINES']
test.build('defines.gyp')
expect = """\
FOO is defined
VALUE is 1
"""
test.run_built_executable('defines', stdout=expect)
# Sleep so that the changed gyp file will have a newer timestamp than the
# previously generated build files.
test.sleep()
test.write('defines.gyp', test.read('defines-env.gyp'))
test.build('defines.gyp', test.ALL)
expect = """\
VALUE is 50
"""
test.run_built_executable('defines', stdout=expect)
test.pass_test()
| bsd-3-clause |
CSIRT-MU/Stream4Flow | applications/statistics/tls_classification/web-interface/controllers/tls_classification_statistics.py | 1 | 3061 | # -*- coding: utf-8 -*-
# Import Elasticsearch library
import elasticsearch
from elasticsearch_dsl import Search, Q, A
# Import advanced python collections
import collections
# Import global functions
from global_functions import escape
#----------------- Main Functions -------------------#
def tls_classification_statistics():
"""
Show the main page of the TLS classification statistics section.
:return: Empty dictionary
"""
# Use standard view
response.view = request.controller + '/tls_classification_statistics.html'
return dict()
#----------------- Chart Functions ------------------#
def get_top_n_statistics():
"""
Obtains TOP N TLS classification statistics.
:return: JSON with status "ok" or "error" and requested data.
"""
# Check login
if not session.logged:
json_response = '{"status": "Error", "data": "You must be logged!"}'
return json_response
# Check mandatory inputs
if not (request.get_vars.beginning and request.get_vars.end and request.get_vars.type and request.get_vars.number):
json_response = '{"status": "Error", "data": "Some mandatory argument is missing!"}'
return json_response
# Parse inputs and set correct format
beginning = escape(request.get_vars.beginning)
end = escape(request.get_vars.end)
type = escape(request.get_vars.type)
number = int(escape(request.get_vars.number))
try:
# Elastic query
client = elasticsearch.Elasticsearch([{'host': myconf.get('consumer.hostname'), 'port': myconf.get('consumer.port')}])
elastic_bool = []
elastic_bool.append({'range': {'@timestamp': {'gte': beginning, 'lte': end}}})
elastic_bool.append({'term': {'@stat_type': type}})
# Prepare query
qx = Q({'bool': {'must': elastic_bool}})
search_ip = Search(using=client, index='_all').query(qx)
search_ip.aggs.bucket('all_nested', 'nested', path='data_array') \
.bucket('by_key', 'terms', field='data_array.key.raw', size=2147483647) \
.bucket('stats_sum', 'sum', field='data_array.value')
# Get result
results = search_ip.execute()
# Prepare data variable
data = ""
# Prepare ordered collection
counter = collections.Counter()
for all_buckets in results.aggregations.all_nested.by_key:
counter[all_buckets.key] += int(all_buckets.stats_sum.value)
# Select top N (number) values
for value, count in counter.most_common(number):
data += value + "," + str(count) + ","
# Remove trailing comma
data = data[:-1]
if data == "":
json_response = '{"status": "Empty", "data": "No data found"}'
else:
json_response = '{"status": "Ok", "data": "' + data + '"}'
return json_response
except Exception as e:
json_response = '{"status": "Error", "data": "Elasticsearch query exception: ' + escape(str(e)) + '"}'
return json_response
| mit |
dholbach/snapcraft | snapcraft/common.py | 10 | 1319 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from snapcraft.internal.common import get_python2_path # noqa
from snapcraft.internal.common import isurl # noqa
from snapcraft.internal.common import get_include_paths # noqa
from snapcraft.internal.common import get_library_paths # noqa
# These are now available via file_utils, but don't break API.
from snapcraft.file_utils import link_or_copy # noqa
from snapcraft.file_utils import replace_in_file # noqa
# These are now available via formatting_utils, but don't break API.
from snapcraft.formatting_utils import combine_paths # noqa
from snapcraft.formatting_utils import format_path_variable # noqa
| gpl-3.0 |