repo_name | path | copies | size | content | license
---|---|---|---|---|---|
JanVan01/gwot-physical | models/locations.py | 1 | 2735 |
from utils.utils import Database
from models.base import BaseModel, BaseMultiModel
class Location(BaseModel):
def __init__(self, id = None):
super().__init__(['id', 'name', 'lon', 'lat', 'height'])
self.id = id
self.name = None
self.lon = None
self.lat = None
self.height = None
def from_dict(self, dict):
super().from_dict(dict)
if 'id' in dict:
self.set_id(dict['id'])
if 'name' in dict:
self.set_name(dict['name'])
if 'lon' in dict and 'lat' in dict:
self.set_position(dict['lon'], dict['lat'])
if 'height' in dict:
self.set_height(dict['height'])
def create(self):
if self.lon is None or self.lat is None or self.height is None:
return False
cur = Database.Instance().dict_cursor()
cur.execute("INSERT INTO Locations (name, geom, height) VALUES (%s, ST_GeomFromText(%s, 4326), %s) RETURNING id", [self.name, self.get_point_wkt(), self.height])
data = cur.fetchone()
self.id = data['id']
if self.id > 0:
return True
else:
return False
def read(self):
if self.id is None:
return False
cur = Database.Instance().dict_cursor()
cur.execute("SELECT *, ST_X(geom) AS lon, ST_Y(geom) AS lat FROM Locations WHERE id = %s", [self.id])
if cur.rowcount > 0:
self.from_dict(cur.fetchone())
return True
else:
return False
def update(self):
if self.id is None or self.lon is None or self.lat is None or self.height is None:
return False
cur = Database.Instance().dict_cursor()
cur.execute("UPDATE Locations SET name = %s, geom = ST_GeomFromText(%s, 4326), height = %s WHERE id = %s", [self.name, self.get_point_wkt(), self.height, self.id])
if cur.rowcount > 0:
return True
else:
return False
def delete(self):
if self.id is None:
return False
cur = Database.Instance().cursor()
cur.execute("DELETE FROM Locations WHERE id = %s", [self.id])
if cur.rowcount > 0:
self.id = None
return True
else:
return False
def get_id(self):
return self.id
def set_id(self, id):
self.id = id
def get_name(self):
return self.name
def set_name(self, name):
self.name = name
def get_position(self):
return {
'lon': self.lon,
'lat': self.lat
}
def get_point_wkt(self):
return "POINT(" + str(self.lon) + " " + str(self.lat) + ")"
def get_longitude(self):
return self.lon
def get_latitude(self):
return self.lat
def set_position(self, lon, lat):
self.lon = lon
self.lat = lat
def get_height(self):
return self.height
def set_height(self, height):
self.height = height
class Locations(BaseMultiModel):
def create(self, pk = None):
return Location(pk)
def get_all(self):
return self._get_all("SELECT *, ST_X(geom) AS lon, ST_Y(geom) AS lat FROM Locations ORDER BY id")
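# --- Added usage sketch (not part of the original module) ---
# A minimal example of the CRUD cycle above; it assumes a configured
# Database singleton and an existing Locations table, and the values
# are hypothetical.
if __name__ == '__main__':
    loc = Location()
    loc.from_dict({'name': 'Example site', 'lon': 7.63, 'lat': 51.96, 'height': 60.0})
    if loc.create():
        print('Created location', loc.get_id(), 'at', loc.get_point_wkt())
        loc.delete()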
| lgpl-3.0 |
kontais/EFI-MIPS | ToolKit/cmds/python/Lib/test/crashed/test_multibytecodec.py | 8 | 3379 |
#!/usr/bin/env python
#
# test_multibytecodec.py
# Unit test for multibytecodec itself
#
# $CJKCodecs: test_multibytecodec.py,v 1.8 2004/06/19 06:09:55 perky Exp $
from test import test_support
from test import test_multibytecodec_support
import unittest, StringIO, codecs
class Test_StreamWriter(unittest.TestCase):
if len(u'\U00012345') == 2: # UCS2
def test_gb18030(self):
s= StringIO.StringIO()
c = codecs.lookup('gb18030')[3](s)
c.write(u'123')
self.assertEqual(s.getvalue(), '123')
c.write(u'\U00012345')
self.assertEqual(s.getvalue(), '123\x907\x959')
c.write(u'\U00012345'[0])
self.assertEqual(s.getvalue(), '123\x907\x959')
c.write(u'\U00012345'[1] + u'\U00012345' + u'\uac00\u00ac')
self.assertEqual(s.getvalue(),
'123\x907\x959\x907\x959\x907\x959\x827\xcf5\x810\x851')
c.write(u'\U00012345'[0])
self.assertEqual(s.getvalue(),
'123\x907\x959\x907\x959\x907\x959\x827\xcf5\x810\x851')
self.assertRaises(UnicodeError, c.reset)
self.assertEqual(s.getvalue(),
'123\x907\x959\x907\x959\x907\x959\x827\xcf5\x810\x851')
# the standard utf-8 codec has a broken StreamReader
if test_multibytecodec_support.__cjkcodecs__:
def test_utf_8(self):
s= StringIO.StringIO()
c = codecs.lookup('utf-8')[3](s)
c.write(u'123')
self.assertEqual(s.getvalue(), '123')
c.write(u'\U00012345')
self.assertEqual(s.getvalue(), '123\xf0\x92\x8d\x85')
c.write(u'\U00012345'[0])
self.assertEqual(s.getvalue(), '123\xf0\x92\x8d\x85')
c.write(u'\U00012345'[1] + u'\U00012345' + u'\uac00\u00ac')
self.assertEqual(s.getvalue(),
'123\xf0\x92\x8d\x85\xf0\x92\x8d\x85\xf0\x92\x8d\x85'
'\xea\xb0\x80\xc2\xac')
c.write(u'\U00012345'[0])
self.assertEqual(s.getvalue(),
'123\xf0\x92\x8d\x85\xf0\x92\x8d\x85\xf0\x92\x8d\x85'
'\xea\xb0\x80\xc2\xac')
c.reset()
self.assertEqual(s.getvalue(),
'123\xf0\x92\x8d\x85\xf0\x92\x8d\x85\xf0\x92\x8d\x85'
'\xea\xb0\x80\xc2\xac\xed\xa0\x88')
c.write(u'\U00012345'[1])
self.assertEqual(s.getvalue(),
'123\xf0\x92\x8d\x85\xf0\x92\x8d\x85\xf0\x92\x8d\x85'
'\xea\xb0\x80\xc2\xac\xed\xa0\x88\xed\xbd\x85')
else: # UCS4
pass
def test_nullcoding(self):
self.assertEqual(''.decode('gb18030'), u'')
self.assertEqual(unicode('', 'gb18030'), u'')
self.assertEqual(u''.encode('gb18030'), '')
def test_str_decode(self):
self.assertEqual('abcd'.encode('gb18030'), 'abcd')
def test_streamwriter_strwrite(self):
s = StringIO.StringIO()
wr = codecs.getwriter('gb18030')(s)
wr.write('abcd')
self.assertEqual(s.getvalue(), 'abcd')
def test_main():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(Test_StreamWriter))
test_support.run_suite(suite)
if __name__ == "__main__":
test_main()
| bsd-3-clause |
kushalbhola/MyStuff | Practice/PythonApplication/env/Lib/site-packages/numpy/core/generate_numpy_api.py | 12 | 7470 |
from __future__ import division, print_function
import os
import genapi
from genapi import \
TypeApi, GlobalVarApi, FunctionApi, BoolValuesApi
import numpy_api
# use annotated api when running under cpychecker
h_template = r"""
#if defined(_MULTIARRAYMODULE) || defined(WITH_CPYCHECKER_STEALS_REFERENCE_TO_ARG_ATTRIBUTE)
typedef struct {
PyObject_HEAD
npy_bool obval;
} PyBoolScalarObject;
extern NPY_NO_EXPORT PyTypeObject PyArrayMapIter_Type;
extern NPY_NO_EXPORT PyTypeObject PyArrayNeighborhoodIter_Type;
extern NPY_NO_EXPORT PyBoolScalarObject _PyArrayScalar_BoolValues[2];
%s
#else
#if defined(PY_ARRAY_UNIQUE_SYMBOL)
#define PyArray_API PY_ARRAY_UNIQUE_SYMBOL
#endif
#if defined(NO_IMPORT) || defined(NO_IMPORT_ARRAY)
extern void **PyArray_API;
#else
#if defined(PY_ARRAY_UNIQUE_SYMBOL)
void **PyArray_API;
#else
static void **PyArray_API=NULL;
#endif
#endif
%s
#if !defined(NO_IMPORT_ARRAY) && !defined(NO_IMPORT)
static int
_import_array(void)
{
int st;
PyObject *numpy = PyImport_ImportModule("numpy.core._multiarray_umath");
PyObject *c_api = NULL;
if (numpy == NULL) {
return -1;
}
c_api = PyObject_GetAttrString(numpy, "_ARRAY_API");
Py_DECREF(numpy);
if (c_api == NULL) {
PyErr_SetString(PyExc_AttributeError, "_ARRAY_API not found");
return -1;
}
#if PY_VERSION_HEX >= 0x03000000
if (!PyCapsule_CheckExact(c_api)) {
PyErr_SetString(PyExc_RuntimeError, "_ARRAY_API is not PyCapsule object");
Py_DECREF(c_api);
return -1;
}
PyArray_API = (void **)PyCapsule_GetPointer(c_api, NULL);
#else
if (!PyCObject_Check(c_api)) {
PyErr_SetString(PyExc_RuntimeError, "_ARRAY_API is not PyCObject object");
Py_DECREF(c_api);
return -1;
}
PyArray_API = (void **)PyCObject_AsVoidPtr(c_api);
#endif
Py_DECREF(c_api);
if (PyArray_API == NULL) {
PyErr_SetString(PyExc_RuntimeError, "_ARRAY_API is NULL pointer");
return -1;
}
/* Perform runtime check of C API version */
if (NPY_VERSION != PyArray_GetNDArrayCVersion()) {
PyErr_Format(PyExc_RuntimeError, "module compiled against "\
"ABI version 0x%%x but this version of numpy is 0x%%x", \
(int) NPY_VERSION, (int) PyArray_GetNDArrayCVersion());
return -1;
}
if (NPY_FEATURE_VERSION > PyArray_GetNDArrayCFeatureVersion()) {
PyErr_Format(PyExc_RuntimeError, "module compiled against "\
"API version 0x%%x but this version of numpy is 0x%%x", \
(int) NPY_FEATURE_VERSION, (int) PyArray_GetNDArrayCFeatureVersion());
return -1;
}
/*
* Perform runtime check of endianness and check it matches the one set by
* the headers (npy_endian.h) as a safeguard
*/
st = PyArray_GetEndianness();
if (st == NPY_CPU_UNKNOWN_ENDIAN) {
PyErr_Format(PyExc_RuntimeError, "FATAL: module compiled as unknown endian");
return -1;
}
#if NPY_BYTE_ORDER == NPY_BIG_ENDIAN
if (st != NPY_CPU_BIG) {
PyErr_Format(PyExc_RuntimeError, "FATAL: module compiled as "\
"big endian, but detected different endianness at runtime");
return -1;
}
#elif NPY_BYTE_ORDER == NPY_LITTLE_ENDIAN
if (st != NPY_CPU_LITTLE) {
PyErr_Format(PyExc_RuntimeError, "FATAL: module compiled as "\
"little endian, but detected different endianness at runtime");
return -1;
}
#endif
return 0;
}
#if PY_VERSION_HEX >= 0x03000000
#define NUMPY_IMPORT_ARRAY_RETVAL NULL
#else
#define NUMPY_IMPORT_ARRAY_RETVAL
#endif
#define import_array() {if (_import_array() < 0) {PyErr_Print(); PyErr_SetString(PyExc_ImportError, "numpy.core.multiarray failed to import"); return NUMPY_IMPORT_ARRAY_RETVAL; } }
#define import_array1(ret) {if (_import_array() < 0) {PyErr_Print(); PyErr_SetString(PyExc_ImportError, "numpy.core.multiarray failed to import"); return ret; } }
#define import_array2(msg, ret) {if (_import_array() < 0) {PyErr_Print(); PyErr_SetString(PyExc_ImportError, msg); return ret; } }
#endif
#endif
"""
c_template = r"""
/* These pointers will be stored in the C-object for use in other
extension modules
*/
void *PyArray_API[] = {
%s
};
"""
c_api_header = """
===========
NumPy C-API
===========
"""
def generate_api(output_dir, force=False):
basename = 'multiarray_api'
h_file = os.path.join(output_dir, '__%s.h' % basename)
c_file = os.path.join(output_dir, '__%s.c' % basename)
d_file = os.path.join(output_dir, '%s.txt' % basename)
targets = (h_file, c_file, d_file)
sources = numpy_api.multiarray_api
if (not force and not genapi.should_rebuild(targets, [numpy_api.__file__, __file__])):
return targets
else:
do_generate_api(targets, sources)
return targets
def do_generate_api(targets, sources):
header_file = targets[0]
c_file = targets[1]
doc_file = targets[2]
global_vars = sources[0]
scalar_bool_values = sources[1]
types_api = sources[2]
multiarray_funcs = sources[3]
multiarray_api = sources[:]
module_list = []
extension_list = []
init_list = []
# Check multiarray api indexes
multiarray_api_index = genapi.merge_api_dicts(multiarray_api)
genapi.check_api_dict(multiarray_api_index)
numpyapi_list = genapi.get_api_functions('NUMPY_API',
multiarray_funcs)
# FIXME: ordered_funcs_api is unused
ordered_funcs_api = genapi.order_dict(multiarray_funcs)
# Create dict name -> *Api instance
api_name = 'PyArray_API'
multiarray_api_dict = {}
for f in numpyapi_list:
name = f.name
index = multiarray_funcs[name][0]
annotations = multiarray_funcs[name][1:]
multiarray_api_dict[f.name] = FunctionApi(f.name, index, annotations,
f.return_type,
f.args, api_name)
for name, val in global_vars.items():
index, type = val
multiarray_api_dict[name] = GlobalVarApi(name, index, type, api_name)
for name, val in scalar_bool_values.items():
index = val[0]
multiarray_api_dict[name] = BoolValuesApi(name, index, api_name)
for name, val in types_api.items():
index = val[0]
multiarray_api_dict[name] = TypeApi(name, index, 'PyTypeObject', api_name)
if len(multiarray_api_dict) != len(multiarray_api_index):
keys_dict = set(multiarray_api_dict.keys())
keys_index = set(multiarray_api_index.keys())
raise AssertionError(
"Multiarray API size mismatch - "
"index has extra keys {}, dict has extra keys {}"
.format(keys_index - keys_dict, keys_dict - keys_index)
)
extension_list = []
for name, index in genapi.order_dict(multiarray_api_index):
api_item = multiarray_api_dict[name]
extension_list.append(api_item.define_from_array_api_string())
init_list.append(api_item.array_api_define())
module_list.append(api_item.internal_define())
# Write to header
s = h_template % ('\n'.join(module_list), '\n'.join(extension_list))
genapi.write_file(header_file, s)
# Write to c-code
s = c_template % ',\n'.join(init_list)
genapi.write_file(c_file, s)
# write to documentation
s = c_api_header
for func in numpyapi_list:
s += func.to_ReST()
s += '\n\n'
genapi.write_file(doc_file, s)
return targets
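# --- Added usage sketch (not part of the original file) ---
# Illustrative standalone invocation, assuming genapi and numpy_api are
# importable; in the real build this module is invoked by numpy's setup
# machinery rather than run directly.
if __name__ == '__main__':
    import sys
    out_dir = sys.argv[1] if len(sys.argv) > 1 else '.'
    for target in generate_api(out_dir, force=True):
        print('wrote', target)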
| apache-2.0 |
Shanec132006/lab3 | server/lib/werkzeug/script.py | 318 | 11249 |
# -*- coding: utf-8 -*-
r'''
werkzeug.script
~~~~~~~~~~~~~~~
.. admonition:: Deprecated Functionality
``werkzeug.script`` is deprecated without replacement functionality.
Python's command line support improved greatly with :mod:`argparse`
and a bunch of alternative modules.
Most of the time you have recurring tasks while writing an application
such as starting up an interactive python interpreter with some prefilled
imports, starting the development server, initializing the database or
something similar.
For that purpose werkzeug provides the `werkzeug.script` module which
helps you write such scripts.
Basic Usage
-----------
The following snippet is roughly the same in every werkzeug script::
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from werkzeug import script
# actions go here
if __name__ == '__main__':
script.run()
Starting this script now does nothing because no actions are defined.
An action is a function in the same module starting with ``"action_"``
which takes a number of arguments where every argument has a default. The
type of the default value specifies the type of the argument.
Arguments can then be passed by position or using ``--name=value`` from
the shell.
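For instance, a hypothetical action taking a string and an integer
argument (the names here are purely illustrative) could look like this::
    def action_greet(name='world', times=('t', 1)):
        """Print a greeting a number of times."""
        for _ in range(times):
            print('Hello, %s!' % name)
and could then be invoked as ``./manage.py greet --name=you -t 3``.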
Because a runserver and shell command is pretty common there are two
factory functions that create such commands::
def make_app():
from yourapplication import YourApplication
return YourApplication(...)
action_runserver = script.make_runserver(make_app, use_reloader=True)
action_shell = script.make_shell(lambda: {'app': make_app()})
Using The Scripts
-----------------
The script from above can be used like this from the shell now:
.. sourcecode:: text
$ ./manage.py --help
$ ./manage.py runserver localhost 8080 --debugger --no-reloader
$ ./manage.py runserver -p 4000
$ ./manage.py shell
As you can see it's possible to pass parameters as positional arguments
or as named parameters, pretty much like Python function calls.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
'''
from __future__ import print_function
import sys
import inspect
import getopt
from os.path import basename
from werkzeug._compat import iteritems
argument_types = {
bool: 'boolean',
str: 'string',
int: 'integer',
float: 'float'
}
converters = {
'boolean': lambda x: x.lower() in ('1', 'true', 'yes', 'on'),
'string': str,
'integer': int,
'float': float
}
def run(namespace=None, action_prefix='action_', args=None):
"""Run the script. Participating actions are looked up in the caller's
namespace if no namespace is given, otherwise in the dict provided.
Only items that start with action_prefix are processed as actions. If
you want to use all items in the namespace provided as actions set
action_prefix to an empty string.
:param namespace: An optional dict in which the functions are looked up.
By default the local namespace of the caller is used.
:param action_prefix: The prefix for the functions. Everything else
is ignored.
:param args: the arguments for the function. If not specified
:data:`sys.argv` without the first argument is used.
"""
if namespace is None:
namespace = sys._getframe(1).f_locals
actions = find_actions(namespace, action_prefix)
if args is None:
args = sys.argv[1:]
if not args or args[0] in ('-h', '--help'):
return print_usage(actions)
elif args[0] not in actions:
fail('Unknown action \'%s\'' % args[0])
arguments = {}
types = {}
key_to_arg = {}
long_options = []
formatstring = ''
func, doc, arg_def = actions[args.pop(0)]
for idx, (arg, shortcut, default, option_type) in enumerate(arg_def):
real_arg = arg.replace('-', '_')
if shortcut:
formatstring += shortcut
if not isinstance(default, bool):
formatstring += ':'
key_to_arg['-' + shortcut] = real_arg
long_options.append(isinstance(default, bool) and arg or arg + '=')
key_to_arg['--' + arg] = real_arg
key_to_arg[idx] = real_arg
types[real_arg] = option_type
arguments[real_arg] = default
try:
optlist, posargs = getopt.gnu_getopt(args, formatstring, long_options)
except getopt.GetoptError as e:
fail(str(e))
specified_arguments = set()
for key, value in enumerate(posargs):
try:
arg = key_to_arg[key]
except KeyError:  # key_to_arg is a dict, so an unknown positional index raises KeyError
fail('Too many parameters')
specified_arguments.add(arg)
try:
arguments[arg] = converters[types[arg]](value)
except ValueError:
fail('Invalid value for argument %s (%s): %s' % (key, arg, value))
for key, value in optlist:
arg = key_to_arg[key]
if arg in specified_arguments:
fail('Argument \'%s\' is specified twice' % arg)
if types[arg] == 'boolean':
if arg.startswith('no_'):
value = 'no'
else:
value = 'yes'
try:
arguments[arg] = converters[types[arg]](value)
except ValueError:
fail('Invalid value for \'%s\': %s' % (key, value))
newargs = {}
for k, v in iteritems(arguments):
newargs[k.startswith('no_') and k[3:] or k] = v
arguments = newargs
return func(**arguments)
def fail(message, code=-1):
"""Fail with an error."""
print('Error: %s' % message, file=sys.stderr)
sys.exit(code)
def find_actions(namespace, action_prefix):
"""Find all the actions in the namespace."""
actions = {}
for key, value in iteritems(namespace):
if key.startswith(action_prefix):
actions[key[len(action_prefix):]] = analyse_action(value)
return actions
def print_usage(actions):
"""Print the usage information. (Help screen)"""
actions = actions.items()
actions.sort()
print('usage: %s <action> [<options>]' % basename(sys.argv[0]))
print(' %s --help' % basename(sys.argv[0]))
print()
print('actions:')
for name, (func, doc, arguments) in actions:
print(' %s:' % name)
for line in doc.splitlines():
print(' %s' % line)
if arguments:
print()
for arg, shortcut, default, argtype in arguments:
if isinstance(default, bool):
print(' %s' % (
(shortcut and '-%s, ' % shortcut or '') + '--' + arg
))
else:
print(' %-30s%-10s%s' % (
(shortcut and '-%s, ' % shortcut or '') + '--' + arg,
argtype, default
))
print()
def analyse_action(func):
"""Analyse a function."""
description = inspect.getdoc(func) or 'undocumented action'
arguments = []
args, varargs, kwargs, defaults = inspect.getargspec(func)
if varargs or kwargs:
raise TypeError('variable length arguments for action not allowed.')
if len(args) != len(defaults or ()):
raise TypeError('not all arguments have proper definitions')
for idx, (arg, definition) in enumerate(zip(args, defaults or ())):
if arg.startswith('_'):
raise TypeError('arguments may not start with an underscore')
if not isinstance(definition, tuple):
shortcut = None
default = definition
else:
shortcut, default = definition
argument_type = argument_types[type(default)]
if isinstance(default, bool) and default is True:
arg = 'no-' + arg
arguments.append((arg.replace('_', '-'), shortcut,
default, argument_type))
return func, description, arguments
def make_shell(init_func=None, banner=None, use_ipython=True):
"""Returns an action callback that spawns a new interactive
python shell.
:param init_func: an optional initialization function that is
called before the shell is started. The return
value of this function is the initial namespace.
:param banner: the banner that is displayed before the shell. If
not specified a generic banner is used instead.
:param use_ipython: if set to `True` ipython is used if available.
"""
if banner is None:
banner = 'Interactive Werkzeug Shell'
if init_func is None:
init_func = dict
def action(ipython=use_ipython):
"""Start a new interactive python session."""
namespace = init_func()
if ipython:
try:
try:
from IPython.frontend.terminal.embed import InteractiveShellEmbed
sh = InteractiveShellEmbed(banner1=banner)
except ImportError:
from IPython.Shell import IPShellEmbed
sh = IPShellEmbed(banner=banner)
except ImportError:
pass
else:
sh(global_ns={}, local_ns=namespace)
return
from code import interact
interact(banner, local=namespace)
return action
def make_runserver(app_factory, hostname='localhost', port=5000,
use_reloader=False, use_debugger=False, use_evalex=True,
threaded=False, processes=1, static_files=None,
extra_files=None, ssl_context=None):
"""Returns an action callback that spawns a new development server.
.. versionadded:: 0.5
`static_files` and `extra_files` were added.
.. versionadded:: 0.6.1
`ssl_context` was added.
:param app_factory: a function that returns a new WSGI application.
:param hostname: the default hostname the server should listen on.
:param port: the default port of the server.
:param use_reloader: the default setting for the reloader.
:param use_evalex: the default setting for the evalex flag of the debugger.
:param threaded: the default threading setting.
:param processes: the default number of processes to start.
:param static_files: optional dict of static files.
:param extra_files: optional list of extra files to track for reloading.
:param ssl_context: optional SSL context for running server in HTTPS mode.
"""
def action(hostname=('h', hostname), port=('p', port),
reloader=use_reloader, debugger=use_debugger,
evalex=use_evalex, threaded=threaded, processes=processes):
"""Start a new development server."""
from werkzeug.serving import run_simple
app = app_factory()
run_simple(hostname, port, app, reloader, debugger, evalex,
extra_files, 1, threaded, processes,
static_files=static_files, ssl_context=ssl_context)
return action
| apache-2.0 |
endlessm/chromium-browser | third_party/llvm/lldb/test/API/lang/c/enum_types/TestEnumTypes.py | 1 | 4806 |
"""Look up enum type information and check for correct display."""
import lldb
from lldbsuite.test.lldbtest import *
import lldbsuite.test.lldbutil as lldbutil
from lldbsuite.test.decorators import *
class EnumTypesTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break inside main().
self.line = line_number('main.c', '// Set break point at this line.')
def test(self):
"""Test 'image lookup -t days' and check for correct display and enum value printing."""
self.build()
exe = self.getBuildArtifact("a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
lldbutil.run_to_source_breakpoint(
self, '// Breakpoint for bitfield', lldb.SBFileSpec("main.c"))
self.expect("fr var a", DATA_TYPES_DISPLAYED_CORRECTLY,
patterns=[' = A$'])
self.expect("fr var b", DATA_TYPES_DISPLAYED_CORRECTLY,
patterns=[' = B$'])
self.expect("fr var c", DATA_TYPES_DISPLAYED_CORRECTLY,
patterns=[' = C$'])
self.expect("fr var ab", DATA_TYPES_DISPLAYED_CORRECTLY,
patterns=[' = AB$'])
self.expect("fr var ac", DATA_TYPES_DISPLAYED_CORRECTLY,
patterns=[' = A | C$'])
self.expect("fr var all", DATA_TYPES_DISPLAYED_CORRECTLY,
patterns=[' = ALL$'])
# Test that an enum that doesn't match the heuristic we use in
# TypeSystemClang::DumpEnumValue gets printed as a raw integer.
self.expect("fr var omega", DATA_TYPES_DISPLAYED_CORRECTLY,
patterns=[' = 7$'])
# Test the behavior in case we have a variable of a type considered
# 'bitfield' by the heuristic, but the value isn't actually fully
# covered by the enumerators.
self.expect("p (enum bitfield)nonsense", DATA_TYPES_DISPLAYED_CORRECTLY,
patterns=[' = B | C | 0x10$'])
# Break inside the main.
bkpt_id = lldbutil.run_break_set_by_file_and_line(
self, "main.c", self.line, num_expected_locations=1, loc_exact=True)
self.runCmd("c", RUN_SUCCEEDED)
# The stop reason of the thread should be breakpoint.
self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
substrs=['stopped',
'stop reason = breakpoint'])
# The breakpoint should have a hit count of 1.
self.expect("breakpoint list -f", BREAKPOINT_HIT_ONCE,
substrs=[' resolved, hit count = 1'])
# Look up information about the 'days' enum type.
# Check for correct display.
self.expect("image lookup -t days", DATA_TYPES_DISPLAYED_CORRECTLY,
substrs=['enum days {',
'Monday',
'Tuesday',
'Wednesday',
'Thursday',
'Friday',
'Saturday',
'Sunday',
'kNumDays',
'}'])
enum_values = ['-4',
'Monday',
'Tuesday',
'Wednesday',
'Thursday',
'Friday',
'Saturday',
'Sunday',
'kNumDays',
'5']
# Make sure a pointer to an anonymous enum type does not crash LLDB and
# displays correctly using the frame variable and expression commands.
self.expect(
'frame variable f.op',
DATA_TYPES_DISPLAYED_CORRECTLY,
substrs=[
'ops *',
'f.op'],
patterns=['0x0+$'])
self.expect(
'frame variable *f.op',
DATA_TYPES_DISPLAYED_CORRECTLY,
substrs=[
'ops',
'*f.op',
'<parent is NULL>'])
self.expect(
'expr f.op',
DATA_TYPES_DISPLAYED_CORRECTLY,
substrs=[
'ops *',
'$'],
patterns=['0x0+$'])
self.expect(
'expr *f.op',
DATA_TYPES_DISPLAYED_CORRECTLY,
substrs=['error:'],
error=True)
bkpt = self.target().FindBreakpointByID(bkpt_id)
for enum_value in enum_values:
self.expect(
"frame variable day",
'check for valid enumeration value',
substrs=[enum_value])
lldbutil.continue_to_breakpoint(self.process(), bkpt)
| bsd-3-clause |
GMadorell/programming-challenges | tuenti/tuenti_challenge_4/qualification/1_anonymous_poll/anonymous_poll.py | 1 | 3240 |
#!/usr/bin/env python
"""
Problem description.
"""
from __future__ import division
import sys
import sqlite3
PATH_DATA = "students"
class AnonymousPollInstance(object):
def __init__(self):
self.gender = None
self.age = None
self.studies = None
self.academic_year = None
class AnonymousPollSolver(object):
def __init__(self, connection, output_file=sys.stdout):
self.__output_file = output_file
self.__connection = connection
def solve(self, instances):
solutions = []
for instance in instances:
solutions.append(self.solve_instance(instance))
for i, solution in enumerate(solutions, start=1):
newline_needed = (i != len(solutions))
self.__output_file.write("Case #{0}: {1}{2}".format(i, solution, "\n" if newline_needed else ""))
def solve_instance(self, instance):
"""
Where the magic happens.
This method should return the solution (as a string) of the given instance.
"""
cursor = self.__connection.cursor()
cursor.execute(
"""
SELECT student_name
FROM STUDENTS
WHERE gender = ? AND age = ? AND education = ? AND academic_year = ?
""", (instance.gender, int(instance.age), instance.studies, int(instance.academic_year))
)
names = cursor.fetchall()
if len(names) > 0:
return ",".join(sorted(map(lambda item: str(item[0]), names)))
else:
return "NONE"
class AnonymousPollParser(object):
def __init__(self):
data = sys.stdin.readlines()
data = map(lambda s: s.strip(), data)
self.amount_samples = int(data[0])  # the first input line holds the sample count; [0][0] would read only its first digit
self.data = data[1:]
self.instances = []
self.parse()
def parse(self):
"""
This method should populate the instances list.
"""
for line in self.data:
row = line.strip().split(",")
instance = AnonymousPollInstance()
instance.gender = row[0]
instance.age = row[1]
instance.studies = row[2]
instance.academic_year = row[3]
self.instances.append(instance)
def get_data_as_type(self, type_):
return map(lambda row: map(lambda element: type_(element), row), self.data)
if __name__ == "__main__":
connection = sqlite3.connect(":memory:")
cursor = connection.cursor()
cursor.execute("DROP TABLE IF EXISTS Students")
cursor.execute("""
CREATE TABLE Students
(student_name TEXT, gender TEXT, age INTEGER, education TEXT, academic_year INTEGER)
""")
with open(PATH_DATA, "r") as students_file:
for line in students_file.readlines():
name, gender, age, education, academic_year = line.strip().split(",")
cursor.execute("INSERT INTO Students VALUES (?, ?, ?, ?, ?)",
(name, gender, age, education, academic_year))
connection.commit()
parser = AnonymousPollParser()
solver = AnonymousPollSolver(connection)
solver.solve(parser.instances)
connection.close()
| mit |
rec/echomesh | code/python/external/pi3d/event/FindDevices.py | 1 | 8501 |
import re
from Constants import *
def test_bit(nlst, b):
index = b / 32
bit = b % 32
if len(nlst) <= index:
return False
if nlst[index] & (1 << bit):
return True
else:
return False
def EvToStr(events):
s = [ ]
if test_bit(events, EV_SYN): s.append("EV_SYN")
if test_bit(events, EV_KEY): s.append("EV_KEY")
if test_bit(events, EV_REL): s.append("EV_REL")
if test_bit(events, EV_ABS): s.append("EV_ABS")
if test_bit(events, EV_MSC): s.append("EV_MSC")
if test_bit(events, EV_LED): s.append("EV_LED")
if test_bit(events, EV_SND): s.append("EV_SND")
if test_bit(events, EV_REP): s.append("EV_REP")
if test_bit(events, EV_FF): s.append("EV_FF" )
if test_bit(events, EV_PWR): s.append("EV_PWR")
if test_bit(events, EV_FF_STATUS): s.append("EV_FF_STATUS")
return s
class DeviceCapabilities(object):
def __init__(self, firstLine, filehandle):
self.EV_SYNevents = [ ]
self.EV_KEYevents = [ ]
self.EV_RELevents = [ ]
self.EV_ABSevents = [ ]
self.EV_MSCevents = [ ]
self.EV_LEDevents = [ ]
self.EV_SNDevents = [ ]
self.EV_REPevents = [ ]
self.EV_FFevents = [ ]
self.EV_PWRevents = [ ]
self.EV_FF_STATUSevents = [ ]
self.eventTypes = [ ]
match = re.search(".*Bus=([0-9A-Fa-f]+).*Vendor=([0-9A-Fa-f]+).*Product=([0-9A-Fa-f]+).*Version=([0-9A-Fa-f]+).*", firstLine)
if not match:
print "Do not understand device ID:", line
self.bus = 0
self.vendor = 0
self.product = 0
self.version = 0
else:
self.bus = int(match.group(1), base=16)
self.vendor = int(match.group(2), base=16)
self.product = int(match.group(3), base=16)
self.version = int(match.group(4), base=16)
for line in filehandle:
if len(line.strip()) == 0:
break
if line[0] == "N":
match = re.search('Name="([^"]+)"', line)
if match:
self.name = match.group(1)
else:
self.name = "UNKNOWN"
elif line[0] == "P":
match = re.search('Phys=(.+)', line)
if match:
self.phys = match.group(1)
else:
self.phys = "UNKNOWN"
elif line[0] == "S":
match = re.search('Sysfs=(.+)', line)
if match:
self.sysfs = match.group(1)
else:
self.sysfs = "UNKNOWN"
elif line[0] == "U":
match = re.search('Uniq=(.*)', line)
if match:
self.uniq = match.group(1)
else:
self.uniq = "UNKNOWN"
elif line[0] == "H":
match = re.search('Handlers=(.+)', line)
if match:
self.handlers = match.group(1).split()
else:
self.handlers = [ ]
elif line[:5] == "B: EV":
eventsNums = [int(x,base=16) for x in line [6:].split()]
eventsNums.reverse()
self.eventTypes = eventsNums
elif line[:6] == "B: KEY":
eventsNums = [int(x,base=16) for x in line [7:].split()]
eventsNums.reverse()
self.EV_KEYevents = eventsNums
elif line[:6] == "B: ABS":
eventsNums = [int(x,base=16) for x in line [7:].split()]
eventsNums.reverse()
self.EV_ABSevents = eventsNums
elif line[:6] == "B: MSC":
eventsNums = [int(x,base=16) for x in line [7:].split()]
eventsNums.reverse()
self.EV_MSCevents = eventsNums
elif line[:6] == "B: REL":
eventsNums = [int(x,base=16) for x in line [7:].split()]
eventsNums.reverse()
self.EV_RELevents = eventsNums
elif line[:6] == "B: LED":
eventsNums = [int(x,base=16) for x in line [7:].split()]
eventsNums.reverse()
self.EV_LEDevents = eventsNums
for handler in self.handlers:
if handler[:5] == "event":
self.eventIndex = int(handler[5:])
self.isMouse = False
self.isKeyboard = False
self.isJoystick = False
def doesProduce(self, eventType, eventCode):
if not test_bit(self.eventTypes, eventType):
return False
if eventType == EV_SYN and test_bit(self.EV_SYNevents, eventCode): return True
if eventType == EV_KEY and test_bit(self.EV_KEYevents, eventCode): return True
if eventType == EV_REL and test_bit(self.EV_RELevents, eventCode): return True
if eventType == EV_ABS and test_bit(self.EV_ABSevents, eventCode): return True
if eventType == EV_MSC and test_bit(self.EV_MSCevents, eventCode): return True
if eventType == EV_LED and test_bit(self.EV_LEDevents, eventCode): return True
if eventType == EV_SND and test_bit(self.EV_SNDevents, eventCode): return True
if eventType == EV_REP and test_bit(self.EV_REPevents, eventCode): return True
if eventType == EV_FF and test_bit(self.EV_FFevents, eventCode): return True
if eventType == EV_PWR and test_bit(self.EV_PWRevents, eventCode): return True
if eventType == EV_FF_STATUS and test_bit(self.EV_FF_STATUSevents, eventCode): return True
return False
def __str__(self):
return self.name+"\nBus: "+str(self.bus)+" Vendor: "+str(self.vendor)+ \
" Product: "+str(self.product)+" Version: "+str(self.version) + \
"\nPhys: " + self.phys + "\nSysfs: " + self.sysfs + "\nUniq: " + self.uniq + \
"\nHandlers: " + str(self.handlers) + " Event Index: "+ str(self.eventIndex) + \
"\nKeyboard: " + str(self.isKeyboard) + " Mouse: " + str(self.isMouse) + \
" Joystick: " + str(self.isJoystick) + \
"\nEvents: " + str(EvToStr(self.eventTypes))
deviceCapabilities = [ ]
def get_devices(filename="/proc/bus/input/devices"):
global deviceCapabilities
with open("/proc/bus/input/devices","r") as filehandle:
for line in filehandle:
if line[0] == "I":
deviceCapabilities.append(DeviceCapabilities(line, filehandle))
return deviceCapabilities
def find_devices(identifier, butNot= [ ]):
"""
finds the event indices of all devices that have the given identifier.
The identifier is a string on the Handlers line of /proc/bus/input/devices.
Keyboards use "kbd", mice use "mouse" and joysticks (and gamepads) use "js".
Returns a list of integer indexes N, where /dev/input/eventN is the event
stream for each device.
If butNot is given, it holds a list of tuples which the returned values should not match.
All devices of each type are returned; if you have two mice, they will both
be used.
"""
ret = [ ]
index = 0
# print "Looking for", identifier
with open("/proc/bus/input/devices","r") as filehandle:
for line in filehandle:
if line[0] == "H":
if identifier in line:
# print line
match = re.search("event([0-9]+)", line)
eventindex = match and match.group(1)
if eventindex:
for old in butNot:
if old[1] == int(eventindex):
# print "Removing", old[1]
break
else:
pass
# print "No need to remove", old[1]
else:
ret.append((index, int(eventindex)))
index += 1
return ret
if __name__ == "__main__":
devs = get_devices()
for dev in devs:
print str(dev)
print " ABS: ", [x for x in range(64) if test_bit(dev.EV_ABSevents, x)]
print " REL: ", [x for x in range(64) if test_bit(dev.EV_RELevents, x)]
print " MSC: ", [x for x in range(64) if test_bit(dev.EV_MSCevents, x)]
print " KEY: ", [x for x in range(512) if test_bit(dev.EV_KEYevents, x)]
print " LED: ", [x for x in range(64) if test_bit(dev.EV_LEDevents, x)]
print
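# Added sketch (not in the original file): exercise find_devices() too.
# "mouse" is one of the Handlers keywords documented above, so this prints
# (index, eventN) pairs for any attached mice.
print "Mice:", find_devices("mouse")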
| mit |
hinerm/ITK | Wrapping/Generators/SwigInterface/pygccxml-1.0.0/pygccxml/declarations/matchers.py | 13 | 19100 |
# Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
"""
defines all "built-in" classes that implement declarations compare functionality
according to some criteria
"""
import os
import re
import types
import algorithm
import variable
import namespace
import calldef
import cpptypes
import templates
import class_declaration
from pygccxml import utils
class matcher_base_t(object):
"""matcher_base_t class defines interface for classes that will implement
compare functionality according to some criteria.
"""
def __init__( self ):
object.__init__( self )
def __call__(self, decl):
raise NotImplementedError( "matcher must always implement the __call__() method." )
def __invert__(self):
"""not-operator (~)"""
return not_matcher_t(self)
def __and__(self, other):
"""and-operator (&)"""
return and_matcher_t([self, other])
def __or__(self, other):
"""or-operator (|)"""
return or_matcher_t([self, other])
def __str__( self ):
return "base class for all matchers"
class and_matcher_t(matcher_base_t):
"""Combine several other matchers with "&".
For example: find all private functions with name XXX
C{ matcher = access_type_matcher_t( 'private' ) & calldef_matcher_t( name='XXX' ) }
"""
def __init__(self, matchers):
matcher_base_t.__init__(self)
self.matchers = matchers
def __call__(self, decl):
for matcher in self.matchers:
if not matcher(decl):
return False
return True
def __str__(self):
return " & ".join( map( lambda x: "(%s)" % str( x ), self.matchers ) )
class or_matcher_t(matcher_base_t):
"""Combine several other matchers with "|".
For example: find all functions and variables with name 'XXX'
C{ matcher = variable_matcher_t( name='XXX' ) | calldef_matcher_t( name='XXX' ) }
"""
def __init__(self, matchers):
matcher_base_t.__init__(self)
self.matchers = matchers
def __call__(self, decl):
for matcher in self.matchers:
if matcher(decl):
return True
return False
def __str__(self):
return " | ".join( map( lambda x: "(%s)" % str( x ), self.matchers ) )
class not_matcher_t(matcher_base_t):
"""Return the inverse result of matcher, using "~"
For example: find all private and protected declarations
C{ matcher = ~access_type_matcher_t( 'private' ) }
"""
def __init__(self, matcher):
matcher_base_t.__init__(self)
self.matcher = matcher
def __call__(self, decl):
return not self.matcher(decl)
def __str__(self):
return "~(%s)"%str(self.matcher)
class declaration_matcher_t( matcher_base_t ):
"""
An instance of this class matches declarations by the following criteria:
- declaration name, which may also be a fully qualified name.
Example: wstring or ::std::wstring
- declaration type.
Example: L{class_t}, L{namespace_t}, L{enumeration_t}
- location within the file system (file or directory)
"""
def __init__( self, name=None, decl_type=None, header_dir=None, header_file=None ):
"""
@param decl_type: declaration type to match by. For example L{enumeration_t}.
@type decl_type: any class that derives from L{declarations.declaration_t} class
@param name: declaration name, could be full name.
@type name: str
@param header_dir: absolute directory path
@type header_dir: str
@param header_file: absolute file path
@type header_file: str
"""
#Another option would be for pygccxml to create an absolute path using the
#os.path.abspath function. But I think this is just wrong, because abspath
#builds the path from the current working directory. That behavior is
#fragile and makes bugs very difficult to find.
matcher_base_t.__init__( self )
self.decl_type = decl_type
self.__name = None
self.__opt_is_tmpl_inst = None
self.__opt_tmpl_name = None
self.__opt_is_full_name = None
self.__decl_name_only = None
self._set_name( name )
self.header_dir = header_dir
self.header_file = header_file
if self.header_dir:
self.header_dir = utils.normalize_path( self.header_dir )
if not os.path.isabs( self.header_dir ):
raise RuntimeError( "Path to header directory should be absolute!" )
if self.header_file:
self.header_file = utils.normalize_path( self.header_file )
if not os.path.isabs( self.header_file ):
raise RuntimeError( "Path to header file should be absolute!" )
def _get_name(self):
return self.__name
def _set_name( self, name ):
self.__name = name
if not self.__name:
self.__opt_is_tmpl_inst = None
self.__opt_tmpl_name = None
self.__opt_is_full_name = None
self.__decl_name_only = None
else:
self.__opt_is_tmpl_inst = templates.is_instantiation( self.__name )
self.__opt_tmpl_name = templates.name( self.__name )
if self.__opt_is_tmpl_inst:
if '::' in self.__opt_tmpl_name:
self.__opt_is_full_name = True
self.__decl_name_only = self.__opt_tmpl_name.split('::')[-1]
else:
self.__opt_is_full_name = False
self.__decl_name_only = self.__opt_tmpl_name
self.__name = templates.normalize( name )
else:
if '::' in self.__name:
self.__opt_is_full_name = True
self.__decl_name_only = self.__name.split('::')[-1]
else:
self.__opt_is_full_name = False
self.__decl_name_only = self.__name
name = property( _get_name, _set_name )
def __str__( self ):
msg = []
if not None is self.decl_type:
msg.append( '(decl type==%s)' % self.decl_type.__name__ )
if not None is self.name:
msg.append( '(name==%s)' % self.name )
if not None is self.header_dir:
msg.append( '(header dir==%s)' % self.header_dir )
if not None is self.header_file:
msg.append( '(header file==%s)' % self.header_file )
if not msg:
msg.append( 'any' )
return ' and '.join( msg )
def __call__( self, decl ):
if not None is self.decl_type:
if not isinstance( decl, self.decl_type ):
return False
if not None is self.name:
if not self.check_name( decl ):
return False
if not None is self.header_dir and decl.location:
decl_dir = os.path.abspath( os.path.dirname( decl.location.file_name ) )
decl_dir = utils.normalize_path( decl_dir )
if decl_dir[:len(self.header_dir)] != self.header_dir:
return False
if not None is self.header_file and decl.location:
decl_file = os.path.abspath( decl.location.file_name )
decl_file = utils.normalize_path( decl_file )
if decl_file != self.header_file:
return False
return True
def check_name( self, decl ):
assert not None is self.name
if self.__opt_is_tmpl_inst:
if not self.__opt_is_full_name:
if self.name != templates.normalize( decl.name ) \
and self.name != templates.normalize( decl.partial_name ):
return False
else:
if self.name != templates.normalize( algorithm.full_name( decl, with_defaults=True ) ) \
and self.name != templates.normalize( algorithm.full_name( decl, with_defaults=False ) ):
return False
else:
if not self.__opt_is_full_name:
if self.name != decl.name and self.name != decl.partial_name:
return False
else:
if self.name != algorithm.full_name( decl, with_defaults=True ) \
and self.name != algorithm.full_name( decl, with_defaults=False ):
return False
return True
def is_full_name(self):
return self.__opt_is_full_name
def _get_decl_name_only(self):
return self.__decl_name_only
decl_name_only = property( _get_decl_name_only )
class variable_matcher_t( declaration_matcher_t ):
"""
An instance of this class matches variables by the following criteria:
- L{declaration_matcher_t} criteria
- variable type. Example: L{int_t} or 'int'
"""
def __init__( self, name=None, type=None, header_dir=None, header_file=None ):
"""
@param type: variable type
@type type: string or instance of L{type_t} derived class
"""
declaration_matcher_t.__init__( self
, name=name
, decl_type=variable.variable_t
, header_dir=header_dir
, header_file=header_file )
self.type = type
def __call__( self, decl ):
if not super( variable_matcher_t, self ).__call__( decl ):
return False
if not None is self.type:
if isinstance( self.type, cpptypes.type_t ):
if self.type != decl.type:
return False
else:
if self.type != decl.type.decl_string:
return False
return True
def __str__( self ):
msg = [ super( variable_matcher_t, self ).__str__() ]
if msg == [ 'any' ]:
msg = []
if not None is self.type:
msg.append( '(value type==%s)' % str(self.type) )
if not msg:
msg.append( 'any' )
return ' and '.join( msg )
class namespace_matcher_t( declaration_matcher_t ):
"""Instance of this class will match namespaces by name."""
def __init__( self, name=None ):
declaration_matcher_t.__init__( self, name=name, decl_type=namespace.namespace_t)
def __call__( self, decl ):
if self.name and decl.name == '':
#An unnamed namespace has the same name as its parent; we should prevent
#it from matching here. The price is that the user must search for unnamed
#namespaces directly.
return False
return super( namespace_matcher_t, self ).__call__( decl )
class calldef_matcher_t( declaration_matcher_t ):
"""
An instance of this class matches callables by the following criteria:
- L{declaration_matcher_t} criteria
- return type. Example: L{int_t} or 'int'
- argument types
"""
def __init__( self, name=None, return_type=None, arg_types=None, decl_type=None, header_dir=None, header_file=None):
"""
@param return_type: callable return type
@type return_type: string or instance of L{type_t} derived class
@param arg_types: list of function argument types. Any item within the
list may be a string or an instance of a L{type_t} derived class. If you
don't want some argument to participate in the match you can put None.
For example:
C{ calldef_matcher_t( arg_types=[ 'int &', None ] ) }
will match all functions that take 2 arguments, where the first one is a
reference to an integer and the second is anything
@type arg_types: list
"""
if None is decl_type:
decl_type = calldef.calldef_t
declaration_matcher_t.__init__( self
, name=name
, decl_type=decl_type
, header_dir=header_dir
, header_file=header_file )
self.return_type = return_type
self.arg_types = arg_types
def __call__( self, decl ):
if not super( calldef_matcher_t, self ).__call__( decl ):
return False
if not None is self.return_type \
and not self.__compare_types( self.return_type, decl.return_type ):
return False
if self.arg_types:
if isinstance( self.arg_types, (types.ListType, types.TupleType)):
if len(self.arg_types) != len( decl.arguments ):
return False
for type_or_str, arg in zip( self.arg_types, decl.arguments ):
if None == type_or_str:
continue
else:
if not self.__compare_types( type_or_str, arg.type ):
return False
return True
def __compare_types( self, type_or_str, type ):
assert type_or_str
if type is None:
return False
if isinstance( type_or_str, cpptypes.type_t ):
if type_or_str != type:
return False
else:
if type_or_str != type.decl_string:
return False
return True
def __str__( self ):
msg = [ super( calldef_matcher_t, self ).__str__() ]
if msg == [ 'any' ]:
msg = []
if not None is self.return_type:
msg.append( '(return type==%s)' % str(self.return_type) )
if self.arg_types:
for i in range( len( self.arg_types ) ):
if self.arg_types[i] is None:
msg.append( '(arg %d type==any)' % i )
else:
msg.append( '(arg %d type==%s)' % ( i, str( self.arg_types[i] ) ) )
if not msg:
msg.append( 'any' )
return ' and '.join( msg )
class operator_matcher_t( calldef_matcher_t ):
"""
An instance of this class matches operators by the following criteria:
- L{calldef_matcher_t} criteria
- operator symbol: =, !=, (), [], etc.
"""
def __init__( self, name=None, symbol=None, return_type=None, arg_types=None, decl_type=None, header_dir=None, header_file=None):
"""
@param symbol: operator symbol
@type symbol: str
"""
if None is decl_type:
decl_type = calldef.operator_t
calldef_matcher_t.__init__( self
, name=name
, return_type=return_type
, arg_types=arg_types
, decl_type=decl_type
, header_dir=header_dir
, header_file=header_file)
self.symbol = symbol
def __call__( self, decl ):
if not super( operator_matcher_t, self ).__call__( decl ):
return False
if not None is self.symbol:
if self.symbol != decl.symbol:
return False
return True
def __str__( self ):
msg = [ super( operator_matcher_t, self ).__str__() ]
if msg == [ 'any' ]:
msg = []
if not None is self.symbol:
msg.append( '(symbol==%s)' % str(self.symbol) )
if not msg:
msg.append( 'any' )
return ' and '.join( msg )
class regex_matcher_t( matcher_base_t ):
"""
An instance of this class matches declarations using a regular expression.
The user should supply a function that extracts the desired information
from a declaration as a string; the matcher then matches that string
against the user's regular expression.
"""
def __init__( self, regex, function=None ):
"""
@param regex: regular expression
@type regex: string, an instance of this class will compile it for you
@param function: function that will be called to get information from a
declaration as a string. This function takes 1 argument: a reference to a
declaration. The return value should be a string. If function is None,
the matcher uses the declaration name.
"""
matcher_base_t.__init__(self)
self.regex = re.compile( regex )
self.function = function
if None is self.function:
self.function = lambda decl: decl.name
def __call__( self, decl ):
text = self.function( decl )
return bool( self.regex.match( text ) )
def __str__( self ):
return '(regex=%s)' % self.regex
class access_type_matcher_t( matcher_base_t ):
"""
An instance of this class matches declarations by their access type: public,
private or protected. If a declaration does not have an access type (for
example, a free function), False is returned.
"""
def __init__( self, access_type ):
"""
@param access_type: declaration access type
@type access_type: L{ACCESS_TYPES} defines a few constants for your convenience.
Alternatively, you can pass 'public', 'private' or 'protected' as the argument.
"""
matcher_base_t.__init__( self )
self.access_type = access_type
def __call__( self, decl ):
if not isinstance( decl.parent, class_declaration.class_t ):
return False
return self.access_type == decl.parent.find_out_member_access_type( decl )
def __str__( self ):
return '(access type=%s)' % self.access_type
class virtuality_type_matcher_t( matcher_base_t ):
"""
An instance of this class matches declarations by their virtuality type: not
virtual, virtual or pure virtual. If a declaration does not have a virtuality
type (for example, a free function), False is returned.
"""
def __init__( self, virtuality_type ):
"""
@param virtuality_type: declaration virtuality type
@type virtuality_type: L{VIRTUALITY_TYPES} defines a few constants for your convenience.
"""
matcher_base_t.__init__( self )
self.virtuality_type = virtuality_type
def __call__( self, decl ):
if not isinstance( decl.parent, class_declaration.class_t ):
return False
return self.virtuality_type == decl.virtuality
def __str__( self ):
return '(virtuality type=%s)' % self.virtuality_type
class custom_matcher_t( matcher_base_t ):
"""
An instance of this class matches declarations by user-defined custom criteria.
"""
def __init__( self, function ):
"""
@param function: callable that takes a single argument (a declaration
instance) and returns True or False
"""
matcher_base_t.__init__( self )
self.function = function
def __call__( self, decl ):
return bool( self.function( decl ) )
def __str__( self ):
return '(user criteria)'
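# --- Added illustration (not part of the original module) ---
# Matchers compose with the &, | and ~ operators defined on matcher_base_t.
# For example, a matcher for private declarations whose name starts with
# "impl_" (the `decls` sequence would come from pygccxml parsing):
#
#   matcher = access_type_matcher_t( 'private' ) & regex_matcher_t( 'impl_.*' )
#   impl_decls = filter( matcher, decls )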
| apache-2.0 |
perlygatekeeper/glowing-robot | Project_Euler/11_find_largest_product_in_grid/find_largest_product_in_grid.py | 1 | 4585 |
#!/opt/local/bin/python
# Python program to find the largest product of any straight-line
# sequence of length four in any direction among
# horizontal, vertical or diagonal
import sys
import math
import timeit
import time
grid_string ='''
08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08
49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00
81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65
52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91
22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80
24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50
32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70
67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21
24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72
21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95
78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92
16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57
86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58
19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40
04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66
88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69
04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36
20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16
20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54
01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48'''
def extractGrid(grid_string):
grid_string = grid_string.replace("\n","",1)
print(grid_string)
print(" ")
grid=[]
for row_string in grid_string.splitlines():
row=[]
for number in row_string.split():
row.append(int(number))
grid.append(row)
# grid.append(row_string.split())
# print (type(grid[0]))
# print (grid[0])
return grid
def findLargestProduct(grid):
record = {}
record["product"] = 0
direction = 'horizontal' # this pass steps along columns within a row
for row in range(20):
for column in range(17):
product = grid[row][column] * grid[row][column+1] * grid[row][column+2] * grid[row][column+3]
if product > record["product"]:
record["product"] = product
record["row"] = row
record["column"] = column
record["direction"] = direction
record["numbers"] = [ grid[row][column], grid[row][column+1], grid[row][column+2], grid[row][column+3] ]
direction = 'vertical' # this pass steps along rows within a column
for row in range(17):
for column in range(20):
product = grid[row][column] * grid[row+1][column] * grid[row+2][column] * grid[row+3][column]
if product > record["product"]:
record["product"] = product
record["row"] = row
record["column"] = column
record["direction"] = direction
record["numbers"] = [ grid[row][column], grid[row+1][column], grid[row+2][column], grid[row+3][column] ]
direction = 'diagonal_upper_right'
for row in range(3,20):
for column in range(17):
product = grid[row][column] * grid[row-1][column+1] * grid[row-2][column+2] * grid[row-3][column+3]
if product > record["product"]:
record["product"] = product
record["row"] = row
record["column"] = column
record["direction"] = direction
record["numbers"] = [ grid[row][column], grid[row-1][column+1], grid[row-2][column+2], grid[row-3][column+3] ]
direction = 'diagonal_lower_right'
for row in range(17):
for column in range(17):
product = grid[row][column] * grid[row+1][column+1] * grid[row+2][column+2] * grid[row+3][column+3]
if product > record["product"]:
record["product"] = product
record["row"] = row
record["column"] = column
record["direction"] = direction
record["numbers"] = [ grid[row][column], grid[row+1][column+1], grid[row+2][column+2], grid[row+3][column+3] ]
return ( record )
start_time = timeit.default_timer()
grid = extractGrid(grid_string)
record = findLargestProduct(grid)
print("Found product %d starting at (%d,%d) and moving %s, which took %f seconds"
% ( record["product"], record["row"]+1, record["column"]+1, record["direction"], ( timeit.default_timer() - start_time ) ) )
print("The four numbers are: ", end="")
for number in record["numbers"]:
print(f"{number} ",end="")
print(" ")
| artistic-2.0 |
hgl888/blink-crosswalk-efl | Tools/Scripts/webkitpy/style/checkers/xcodeproj_unittest.py | 48 | 3070 |
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for xcodeproj.py."""
import unittest
import xcodeproj
class TestErrorHandler(object):
"""Error handler for XcodeProjectFileChecker unittests"""
def __init__(self, handler):
self.handler = handler
def turn_off_line_filtering(self):
pass
def __call__(self, line_number, category, confidence, message):
self.handler(self, line_number, category, confidence, message)
return True
class XcodeProjectFileCheckerTest(unittest.TestCase):
"""Tests XcodeProjectFileChecker class."""
def assert_no_error(self, lines):
def handler(error_handler, line_number, category, confidence, message):
self.fail('Unexpected error: %d %s %d %s' % (line_number, category, confidence, message))
error_handler = TestErrorHandler(handler)
checker = xcodeproj.XcodeProjectFileChecker('', error_handler)
checker.check(lines)
def assert_error(self, lines, expected_message):
self.had_error = False
def handler(error_handler, line_number, category, confidence, message):
self.assertEqual(expected_message, message)
self.had_error = True
error_handler = TestErrorHandler(handler)
checker = xcodeproj.XcodeProjectFileChecker('', error_handler)
checker.check(lines)
self.assertTrue(self.had_error, '%s should have error: %s.' % (lines, expected_message))
def test_detect_development_region(self):
self.assert_no_error(['developmentRegion = English;'])
self.assert_error([''], 'Missing "developmentRegion = English".')
self.assert_error(['developmentRegion = Japanese;'],
'developmentRegion is not English.')
| bsd-3-clause |
Nolski/olympia | apps/constants/search.py | 16 | 2840 |
# These two dicts map between language codes in zamboni and language
# analyzers in elasticsearch.
#
# Each key of SEARCH_ANALYZER_MAP is a language analyzer supported by
# elasticsearch. See
# http://www.elasticsearch.org/guide/reference/index-modules/analysis/lang-analyzer.html
#
# Each value of SEARCH_ANALYZER_MAP is a list of the language codes supported
# by the key analyzer. All values are picked from AMO_LANGUAGES in settings.py.
#
# The commented-out rows are languages that elasticsearch does not support
# yet. We should update the map when elasticsearch gains an analyzer for
# such a language.
SEARCH_ANALYZER_MAP = {
#'': ['af'], # Afrikaans
'arabic': ['ar'],
'bulgarian': ['bg'],
'catalan': ['ca'],
'czech': ['cs'],
'danish': ['da'],
'german': ['de'],
'greek': ['el'],
'english': ['en-us'],
'spanish': ['es'],
'basque': ['eu'],
'persian': ['fa'],
'finnish': ['fi'],
'french': ['fr'],
#'': ['ga-ie'], # Gaelic - Ireland
#'': ['he'], # Hebrew
'hungarian': ['hu'],
'indonesian': ['id'],
'italian': ['it'],
'cjk': ['ja', 'ko'],
#'': ['mn'], # Mongolian
'dutch': ['nl'],
# Polish requires the Elasticsearch plugin:
# https://github.com/elasticsearch/elasticsearch-analysis-stempel
'polish': ['pl'],
'brazilian': ['pt-br'],
'portuguese': ['pt-pt'],
'romanian': ['ro'],
'russian': ['ru'],
#'': ['sk'], # Slovak
#'': ['sl'], # Slovenian
#'': ['sq'], # Albanian
'swedish': ['sv-se'],
#'': ['uk'], # Ukrainian
#'': ['vi'], # Vietnamese
'chinese': ['zh-cn', 'zh-tw'],
}
# This dict is the inverse mapping of SEARCH_ANALYZER_MAP.
SEARCH_LANGUAGE_TO_ANALYZER = {}
for analyzer, languages in SEARCH_ANALYZER_MAP.items():
for language in languages:
SEARCH_LANGUAGE_TO_ANALYZER[language] = analyzer
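# For example (illustrative lookups, following directly from the dicts above):
#   SEARCH_LANGUAGE_TO_ANALYZER['pt-br'] == 'brazilian'
#   SEARCH_LANGUAGE_TO_ANALYZER['ja'] == 'cjk'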
# List of analyzers that require a plugin. Depending on settings.ES_USE_PLUGINS
# we may disable or bypass these.
SEARCH_ANALYZER_PLUGINS = [
'polish',
]
# Which stemmer to use for each language.
#
# Note: we also use the keys of this dict for the supported stop-word sets,
# which are specified as, e.g., '_english_'.
STEMMER_MAP = {
'arabic': 'arabic',
'basque': 'basque',
'brazilian': 'brazilian',
'bulgarian': 'bulgarian',
'catalan': 'catalan',
'czech': 'czech',
'danish': 'danish',
'dutch': 'dutch',
'english': 'minimal_english',
'finnish': 'light_finish', # Yes, this is misspelled in ES.
'french': 'light_french',
'german': 'light_german',
'greek': 'greek',
'hungarian': 'light_hungarian',
'indonesian': 'indonesian',
'italian': 'light_italian',
'portuguese': 'light_portuguese',
'romanian': 'romanian',
'russian': 'russian',
'spanish': 'light_spanish',
'swedish': 'light_swedish',
}
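# Illustrative sketch only (not used by the app; the returned dict shape is an
# assumption for illustration, not an Elasticsearch settings format): derive
# the analysis names for a given language code from the maps above.
def _example_analysis_names(language):
    analyzer = SEARCH_LANGUAGE_TO_ANALYZER.get(language)
    return {
        'analyzer': analyzer,
        'stemmer': STEMMER_MAP.get(analyzer),
        # per the note above, stop-word sets are named '_<analyzer>_'
        'stopwords': '_%s_' % analyzer if analyzer in STEMMER_MAP else None,
    }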
|
bsd-3-clause
|
Velociraptor85/pyload
|
module/InitHomeDir.py
|
40
|
2675
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
@author: RaNaN
This module inits the working directories and the global variables owd, pypath, homedir and configdir
"""
from os import makedirs, path, chdir
from os.path import join
import sys
from sys import argv, platform
import __builtin__
__builtin__.owd = path.abspath("") #original working directory
__builtin__.pypath = path.abspath(path.join(__file__, "..", ".."))
sys.path.append(join(pypath, "module", "lib"))
homedir = ""
if platform == "win32":  # sys.platform is "win32" on Windows; "nt" is os.name
    homedir = path.expanduser("~")
    if homedir == "~":  # old Pythons may fail to expand "~" on Windows
        import ctypes
        import ctypes.wintypes  # submodule is not pulled in by "import ctypes"
        CSIDL_APPDATA = 26
        _SHGetFolderPath = ctypes.windll.shell32.SHGetFolderPathW
        _SHGetFolderPath.argtypes = [ctypes.wintypes.HWND,
                                     ctypes.c_int,
                                     ctypes.wintypes.HANDLE,
                                     ctypes.wintypes.DWORD, ctypes.wintypes.LPCWSTR]
        # create_unicode_buffer lives in ctypes itself, not ctypes.wintypes
        path_buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH)
        result = _SHGetFolderPath(0, CSIDL_APPDATA, 0, 0, path_buf)
        homedir = path_buf.value
else:
homedir = path.expanduser("~")
__builtin__.homedir = homedir
args = " ".join(argv[1:])
# dirty method to set configdir from commandline arguments,
# e.g. "--configdir=/opt/pyload" yields configdir = "/opt/pyload"
if "--configdir=" in args:
pos = args.find("--configdir=")
end = args.find("-", pos + 12)
if end == -1:
configdir = args[pos + 12:].strip()
else:
configdir = args[pos + 12:end].strip()
elif path.exists(path.join(pypath, "module", "config", "configdir")):
f = open(path.join(pypath, "module", "config", "configdir"), "rb")
c = f.read().strip()
f.close()
configdir = path.join(pypath, c)
else:
    if platform in ("posix", "linux2"):  # sys.platform; "linux2" covers Linux
configdir = path.join(homedir, ".pyload")
else:
configdir = path.join(homedir, "pyload")
if not path.exists(configdir):
makedirs(configdir, 0700)
__builtin__.configdir = configdir
chdir(configdir)
#print "Using %s as working directory." % configdir
|
gpl-3.0
|
SmithsonianEnterprises/django-cms
|
menus/base.py
|
47
|
1651
|
# -*- coding: utf-8 -*-
from django.utils.encoding import smart_str
class Menu(object):
namespace = None
def __init__(self):
if not self.namespace:
self.namespace = self.__class__.__name__
def get_nodes(self, request):
"""
        Should return a list of NavigationNode instances.
"""
raise NotImplementedError
class Modifier(object):
def modify(self, request, nodes, namespace, root_id, post_cut, breadcrumb):
pass
class NavigationNode(object):
def __init__(self, title, url, id, parent_id=None, parent_namespace=None,
attr=None, visible=True):
self.children = [] # do not touch
self.parent = None # do not touch, code depends on this
        self.namespace = None  # TODO: ascertain why we need this and the attribute above
self.title = title
self.url = url
self.id = id
self.parent_id = parent_id
self.parent_namespace = parent_namespace
self.visible = visible
self.attr = attr or {} # To avoid declaring a dict in defaults...
def __repr__(self):
return "<Navigation Node: %s>" % smart_str(self.title)
def get_menu_title(self):
return self.title
def get_absolute_url(self):
return self.url
def get_attribute(self, name):
return self.attr.get(name, None)
def get_descendants(self):
return sum(([node] + node.get_descendants() for node in self.children), [])
def get_ancestors(self):
if getattr(self, 'parent', None):
return [self.parent] + self.parent.get_ancestors()
else:
return []
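# Illustrative sketch (not part of this module): a minimal Menu subclass
# showing the get_nodes() contract; the titles, urls and ids are made up.
class _ExampleMenu(Menu):
    def get_nodes(self, request):
        home = NavigationNode('Home', '/', 1)
        about = NavigationNode('About', '/about/', 2, parent_id=1)
        return [home, about]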
|
bsd-3-clause
|
Pexego/odoo
|
addons/l10n_us/__openerp__.py
|
341
|
1763
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'United States - Chart of accounts',
'version': '1.1',
'author': 'OpenERP SA',
'category': 'Localization/Account Charts',
'description': """
United States - Chart of accounts.
==================================
""",
'website': 'http://www.openerp.com',
'depends': ['account_chart', 'account_anglo_saxon'],
'data': [
'l10n_us_account_type.xml',
'account_chart_template.xml',
'account.account.template.csv',
'account_tax_code_template.xml',
'account_tax_template.xml',
'account_chart_template_after.xml',
'l10n_us_wizard.xml'
],
'demo': [],
'test': [],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
mvaled/OpenUpgrade
|
openerp/addons/test_impex/tests/test_load.py
|
350
|
44525
|
# -*- coding: utf-8 -*-
import json
import pkgutil
import unittest2
import openerp.modules.registry
import openerp
from openerp.tests import common
from openerp.tools.misc import mute_logger
def message(msg, type='error', from_=0, to_=0, record=0, field='value', **kwargs):
return dict(kwargs,
type=type, rows={'from': from_, 'to': to_}, record=record,
field=field, message=msg)
def moreaction(**kwargs):
return dict(kwargs,
type='ir.actions.act_window',
target='new',
view_mode='tree,form',
view_type='form',
views=[(False, 'tree'), (False, 'form')],
help=u"See all possible values")
def values(seq, field='value'):
return [item[field] for item in seq]
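# Illustrative: values([{'value': 1}, {'value': 2}]) == [1, 2], and
# message(u"boom") builds the canonical error dict returned for row 0 of the
# 'value' field, i.e. the shape asserted all over the tests below.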
class ImporterCase(common.TransactionCase):
model_name = False
def __init__(self, *args, **kwargs):
super(ImporterCase, self).__init__(*args, **kwargs)
self.model = None
def setUp(self):
super(ImporterCase, self).setUp()
self.model = self.registry(self.model_name)
self.registry('ir.model.data').clear_caches()
def import_(self, fields, rows, context=None):
return self.model.load(
self.cr, openerp.SUPERUSER_ID, fields, rows, context=context)
def read(self, fields=('value',), domain=(), context=None):
return self.model.read(
self.cr, openerp.SUPERUSER_ID,
self.model.search(self.cr, openerp.SUPERUSER_ID, domain, context=context),
fields=fields, context=context)
def browse(self, domain=(), context=None):
return self.model.browse(
self.cr, openerp.SUPERUSER_ID,
self.model.search(self.cr, openerp.SUPERUSER_ID, domain, context=context),
context=context)
    def xid(self, record):
        """ Return an external id for ``record``, creating a throwaway
        ``__test__.*`` one if none exists yet.
        """
ModelData = self.registry('ir.model.data')
ids = ModelData.search(
self.cr, openerp.SUPERUSER_ID,
[('model', '=', record._name), ('res_id', '=', record.id)])
if ids:
d = ModelData.read(
self.cr, openerp.SUPERUSER_ID, ids, ['name', 'module'])[0]
if d['module']:
return '%s.%s' % (d['module'], d['name'])
return d['name']
name = record.name_get()[0][1]
# fix dotted name_get results, otherwise xid lookups blow up
name = name.replace('.', '-')
ModelData.create(self.cr, openerp.SUPERUSER_ID, {
'name': name,
'model': record._name,
'res_id': record.id,
'module': '__test__'
})
return '__test__.' + name
    def add_translations(self, name, type, code, *tnx):
        """ Ensure the ``res.lang`` for ``code`` exists, then create the
        ``ir.translation`` records needed by translated-import tests.
        """
Lang = self.registry('res.lang')
if not Lang.search(self.cr, openerp.SUPERUSER_ID, [('code', '=', code)]):
Lang.create(self.cr, openerp.SUPERUSER_ID, {
'name': code,
'code': code,
'translatable': True,
'date_format': '%d.%m.%Y',
'decimal_point': ',',
})
Translations = self.registry('ir.translation')
for source, value in tnx:
Translations.create(self.cr, openerp.SUPERUSER_ID, {
'name': name,
'lang': code,
'type': type,
'src': source,
'value': value,
'state': 'translated',
})
class test_ids_stuff(ImporterCase):
model_name = 'export.integer'
def test_create_with_id(self):
result = self.import_(['.id', 'value'], [['42', '36']])
self.assertIs(result['ids'], False)
self.assertEqual(result['messages'], [{
'type': 'error',
'rows': {'from': 0, 'to': 0},
'record': 0,
'field': '.id',
'message': u"Unknown database identifier '42'",
}])
def test_create_with_xid(self):
result = self.import_(['id', 'value'], [['somexmlid', '42']])
self.assertEqual(len(result['ids']), 1)
self.assertFalse(result['messages'])
self.assertEqual(
'somexmlid',
self.xid(self.browse()[0]))
def test_update_with_id(self):
id = self.model.create(self.cr, openerp.SUPERUSER_ID, {'value': 36})
self.assertEqual(
36,
self.model.browse(self.cr, openerp.SUPERUSER_ID, id).value)
result = self.import_(['.id', 'value'], [[str(id), '42']])
self.assertEqual(len(result['ids']), 1)
self.assertFalse(result['messages'])
self.assertEqual(
            [42],  # value updated to the imported one
values(self.read()))
def test_update_with_xid(self):
self.import_(['id', 'value'], [['somexmlid', '36']])
self.assertEqual([36], values(self.read()))
self.import_(['id', 'value'], [['somexmlid', '1234567']])
self.assertEqual([1234567], values(self.read()))
class test_boolean_field(ImporterCase):
model_name = 'export.boolean'
def test_empty(self):
self.assertEqual(
self.import_(['value'], []),
{'ids': [], 'messages': []})
def test_exported(self):
result = self.import_(['value'], [['False'], ['True'], ])
self.assertEqual(len(result['ids']), 2)
self.assertFalse(result['messages'])
records = self.read()
self.assertEqual([
False,
True,
], values(records))
def test_falses(self):
for lang, source, value in [('fr_FR', 'no', u'non'),
('de_DE', 'no', u'nein'),
('ru_RU', 'no', u'нет'),
('nl_BE', 'false', u'vals'),
('lt_LT', 'false', u'klaidingas')]:
self.add_translations('test_import.py', 'code', lang, (source, value))
falses = [[u'0'], [u'no'], [u'false'], [u'FALSE'], [u''],
[u'non'], # no, fr
[u'nein'], # no, de
[u'нет'], # no, ru
[u'vals'], # false, nl
[u'klaidingas'], # false, lt,
]
result = self.import_(['value'], falses)
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), len(falses))
self.assertEqual([False] * len(falses), values(self.read()))
def test_trues(self):
trues = [['None'], ['nil'], ['()'], ['f'], ['#f'],
                 # Problem: OpenOffice (and probably Excel) outputs localized booleans
['VRAI'], ['ok'], ['true'], ['yes'], ['1'], ]
result = self.import_(['value'], trues)
self.assertEqual(len(result['ids']), 10)
self.assertEqual(result['messages'], [
message(u"Unknown value '%s' for boolean field 'unknown', assuming 'yes'" % v[0],
moreinfo=u"Use '1' for yes and '0' for no",
type='warning', from_=i, to_=i, record=i)
for i, v in enumerate(trues)
if v[0] not in ('true', 'yes', '1')
])
self.assertEqual(
[True] * 10,
values(self.read()))
class test_integer_field(ImporterCase):
model_name = 'export.integer'
def test_none(self):
self.assertEqual(
self.import_(['value'], []),
{'ids': [], 'messages': []})
def test_empty(self):
result = self.import_(['value'], [['']])
self.assertEqual(len(result['ids']), 1)
self.assertFalse(result['messages'])
self.assertEqual(
[False],
values(self.read()))
def test_zero(self):
result = self.import_(['value'], [['0']])
self.assertEqual(len(result['ids']), 1)
self.assertFalse(result['messages'])
result = self.import_(['value'], [['-0']])
self.assertEqual(len(result['ids']), 1)
self.assertFalse(result['messages'])
self.assertEqual([False, False], values(self.read()))
def test_positives(self):
result = self.import_(['value'], [
['1'],
['42'],
[str(2**31-1)],
['12345678']
])
self.assertEqual(len(result['ids']), 4)
self.assertFalse(result['messages'])
self.assertEqual([
1, 42, 2**31-1, 12345678
], values(self.read()))
def test_negatives(self):
result = self.import_(['value'], [
['-1'],
['-42'],
[str(-(2**31 - 1))],
[str(-(2**31))],
['-12345678']
])
self.assertEqual(len(result['ids']), 5)
self.assertFalse(result['messages'])
self.assertEqual([
-1, -42, -(2**31 - 1), -(2**31), -12345678
], values(self.read()))
@mute_logger('openerp.sql_db', 'openerp.models')
def test_out_of_range(self):
result = self.import_(['value'], [[str(2**31)]])
self.assertIs(result['ids'], False)
self.assertEqual(result['messages'], [{
'type': 'error',
'rows': {'from': 0, 'to': 0},
'record': 0,
'message': "integer out of range\n"
}])
result = self.import_(['value'], [[str(-2**32)]])
self.assertIs(result['ids'], False)
self.assertEqual(result['messages'], [{
'type': 'error',
'rows': {'from': 0, 'to': 0},
'record': 0,
'message': "integer out of range\n"
}])
def test_nonsense(self):
result = self.import_(['value'], [['zorglub']])
self.assertIs(result['ids'], False)
self.assertEqual(result['messages'], [{
'type': 'error',
'rows': {'from': 0, 'to': 0},
'record': 0,
'field': 'value',
'message': u"'zorglub' does not seem to be an integer for field 'unknown'",
}])
class test_float_field(ImporterCase):
model_name = 'export.float'
def test_none(self):
self.assertEqual(
self.import_(['value'], []),
{'ids': [], 'messages': []})
def test_empty(self):
result = self.import_(['value'], [['']])
self.assertEqual(len(result['ids']), 1)
self.assertFalse(result['messages'])
self.assertEqual(
[False],
values(self.read()))
def test_zero(self):
result = self.import_(['value'], [['0']])
self.assertEqual(len(result['ids']), 1)
self.assertFalse(result['messages'])
result = self.import_(['value'], [['-0']])
self.assertEqual(len(result['ids']), 1)
self.assertFalse(result['messages'])
self.assertEqual([False, False], values(self.read()))
def test_positives(self):
result = self.import_(['value'], [
['1'],
['42'],
[str(2**31-1)],
['12345678'],
[str(2**33)],
['0.000001'],
])
self.assertEqual(len(result['ids']), 6)
self.assertFalse(result['messages'])
self.assertEqual([
1, 42, 2**31-1, 12345678, 2.0**33, .000001
], values(self.read()))
def test_negatives(self):
result = self.import_(['value'], [
['-1'],
['-42'],
[str(-2**31 + 1)],
[str(-2**31)],
['-12345678'],
[str(-2**33)],
['-0.000001'],
])
self.assertEqual(len(result['ids']), 7)
self.assertFalse(result['messages'])
self.assertEqual([
-1, -42, -(2**31 - 1), -(2**31), -12345678, -2.0**33, -.000001
], values(self.read()))
def test_nonsense(self):
result = self.import_(['value'], [['foobar']])
self.assertIs(result['ids'], False)
self.assertEqual(result['messages'], [
message(u"'foobar' does not seem to be a number for field 'unknown'")])
class test_string_field(ImporterCase):
model_name = 'export.string.bounded'
def test_empty(self):
result = self.import_(['value'], [['']])
self.assertEqual(len(result['ids']), 1)
self.assertFalse(result['messages'])
self.assertEqual([False], values(self.read()))
def test_imported(self):
result = self.import_(['value'], [
[u'foobar'],
[u'foobarbaz'],
[u'Með suð í eyrum við spilum endalaust'],
[u"People 'get' types. They use them all the time. Telling "
u"someone he can't pound a nail with a banana doesn't much "
u"surprise him."]
])
self.assertEqual(len(result['ids']), 4)
self.assertFalse(result['messages'])
self.assertEqual([
u"foobar",
u"foobarbaz",
u"Með suð í eyrum ",
u"People 'get' typ",
], values(self.read()))
class test_unbound_string_field(ImporterCase):
model_name = 'export.string'
def test_imported(self):
result = self.import_(['value'], [
[u'í dag viðrar vel til loftárása'],
# ackbar.jpg
[u"If they ask you about fun, you tell them – fun is a filthy"
u" parasite"]
])
self.assertEqual(len(result['ids']), 2)
self.assertFalse(result['messages'])
self.assertEqual([
u"í dag viðrar vel til loftárása",
u"If they ask you about fun, you tell them – fun is a filthy parasite"
], values(self.read()))
class test_required_string_field(ImporterCase):
model_name = 'export.string.required'
@mute_logger('openerp.sql_db', 'openerp.models')
def test_empty(self):
result = self.import_(['value'], [[]])
self.assertEqual(result['messages'], [message(
u"Missing required value for the field 'unknown' (value)")])
self.assertIs(result['ids'], False)
@mute_logger('openerp.sql_db', 'openerp.models')
def test_not_provided(self):
result = self.import_(['const'], [['12']])
self.assertEqual(result['messages'], [message(
u"Missing required value for the field 'unknown' (value)")])
self.assertIs(result['ids'], False)
class test_text(ImporterCase):
model_name = 'export.text'
def test_empty(self):
result = self.import_(['value'], [['']])
self.assertEqual(len(result['ids']), 1)
self.assertFalse(result['messages'])
self.assertEqual([False], values(self.read()))
def test_imported(self):
s = (u"Breiðskífa er notað um útgefna hljómplötu sem inniheldur "
u"stúdíóupptökur frá einum flytjanda. Breiðskífur eru oftast "
u"milli 25-80 mínútur og er lengd þeirra oft miðuð við 33⅓ "
u"snúninga 12 tommu vínylplötur (sem geta verið allt að 30 mín "
u"hvor hlið).\n\nBreiðskífur eru stundum tvöfaldar og eru þær þá"
u" gefnar út á tveimur geisladiskum eða tveimur vínylplötum.")
result = self.import_(['value'], [[s]])
self.assertEqual(len(result['ids']), 1)
self.assertFalse(result['messages'])
self.assertEqual([s], values(self.read()))
class test_selection(ImporterCase):
model_name = 'export.selection'
translations_fr = [
("Foo", "tete"),
("Bar", "titi"),
("Qux", "toto"),
]
def test_imported(self):
result = self.import_(['value'], [
['Qux'],
['Bar'],
['Foo'],
['2'],
])
self.assertEqual(len(result['ids']), 4)
self.assertFalse(result['messages'])
self.assertEqual([3, 2, 1, 2], values(self.read()))
def test_imported_translated(self):
self.add_translations(
'export.selection,value', 'selection', 'fr_FR', *self.translations_fr)
result = self.import_(['value'], [
['toto'],
['tete'],
['titi'],
], context={'lang': 'fr_FR'})
self.assertEqual(len(result['ids']), 3)
self.assertFalse(result['messages'])
self.assertEqual([3, 1, 2], values(self.read()))
result = self.import_(['value'], [['Foo']], context={'lang': 'fr_FR'})
self.assertEqual(len(result['ids']), 1)
self.assertFalse(result['messages'])
def test_invalid(self):
result = self.import_(['value'], [['Baz']])
self.assertIs(result['ids'], False)
self.assertEqual(result['messages'], [message(
u"Value 'Baz' not found in selection field 'unknown'",
moreinfo="Foo Bar Qux 4".split())])
result = self.import_(['value'], [[42]])
self.assertIs(result['ids'], False)
self.assertEqual(result['messages'], [message(
u"Value '42' not found in selection field 'unknown'",
moreinfo="Foo Bar Qux 4".split())])
class test_selection_with_default(ImporterCase):
model_name = 'export.selection.withdefault'
def test_empty(self):
""" Empty cells should set corresponding field to False
"""
result = self.import_(['value'], [['']])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
self.assertEqual(
values(self.read()),
[False])
def test_default(self):
""" Non-provided cells should set corresponding field to default
"""
result = self.import_(['const'], [['42']])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
self.assertEqual(
values(self.read()),
[2])
class test_selection_function(ImporterCase):
model_name = 'export.selection.function'
translations_fr = [
("Corge", "toto"),
("Grault", "titi"),
("Wheee", "tete"),
("Moog", "tutu"),
]
def test_imported(self):
""" import uses fields_get, so translates import label (may or may not
be good news) *and* serializes the selection function to reverse it:
import does not actually know that the selection field uses a function
"""
# NOTE: conflict between a value and a label => pick first
result = self.import_(['value'], [
['3'],
["Grault"],
])
self.assertEqual(len(result['ids']), 2)
self.assertFalse(result['messages'])
self.assertEqual(
[3, 1],
values(self.read()))
def test_translated(self):
""" Expects output of selection function returns translated labels
"""
self.add_translations(
'export.selection,value', 'selection', 'fr_FR', *self.translations_fr)
result = self.import_(['value'], [
['titi'],
['tete'],
], context={'lang': 'fr_FR'})
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 2)
self.assertEqual(values(self.read()), [1, 2])
result = self.import_(['value'], [['Wheee']], context={'lang': 'fr_FR'})
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
class test_m2o(ImporterCase):
model_name = 'export.many2one'
def test_by_name(self):
# create integer objects
integer_id1 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
integer_id2 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 36})
# get its name
name1 = dict(self.registry('export.integer').name_get(
self.cr, openerp.SUPERUSER_ID,[integer_id1]))[integer_id1]
name2 = dict(self.registry('export.integer').name_get(
self.cr, openerp.SUPERUSER_ID,[integer_id2]))[integer_id2]
result = self.import_(['value'], [
# import by name_get
[name1],
[name1],
[name2],
])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 3)
# correct ids assigned to corresponding records
self.assertEqual([
(integer_id1, name1),
(integer_id1, name1),
(integer_id2, name2),],
values(self.read()))
def test_by_xid(self):
ExportInteger = self.registry('export.integer')
integer_id = ExportInteger.create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
xid = self.xid(ExportInteger.browse(
self.cr, openerp.SUPERUSER_ID, [integer_id])[0])
result = self.import_(['value/id'], [[xid]])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
b = self.browse()
self.assertEqual(42, b[0].value.value)
def test_by_id(self):
integer_id = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
result = self.import_(['value/.id'], [[integer_id]])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
b = self.browse()
self.assertEqual(42, b[0].value.value)
def test_by_names(self):
integer_id1 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
integer_id2 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
name1 = dict(self.registry('export.integer').name_get(
self.cr, openerp.SUPERUSER_ID,[integer_id1]))[integer_id1]
name2 = dict(self.registry('export.integer').name_get(
self.cr, openerp.SUPERUSER_ID,[integer_id2]))[integer_id2]
# names should be the same
self.assertEqual(name1, name2)
result = self.import_(['value'], [[name2]])
self.assertEqual(
result['messages'],
[message(u"Found multiple matches for field 'unknown' (2 matches)",
type='warning')])
self.assertEqual(len(result['ids']), 1)
self.assertEqual([
(integer_id1, name1)
], values(self.read()))
def test_fail_by_implicit_id(self):
""" Can't implicitly import records by id
"""
# create integer objects
integer_id1 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
integer_id2 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 36})
        # everything goes through name_search, so numeric-looking cells are
        # treated as names; there is no fallback to database ids
result = self.import_(['value'], [
# import by id, without specifying it
[integer_id1],
[integer_id2],
[integer_id1],
])
self.assertEqual(result['messages'], [
message(u"No matching record found for name '%s' in field 'unknown'" % id,
from_=index, to_=index, record=index,
moreinfo=moreaction(res_model='export.integer'))
for index, id in enumerate([integer_id1, integer_id2, integer_id1])])
self.assertIs(result['ids'], False)
@mute_logger('openerp.sql_db')
def test_fail_id_mistype(self):
result = self.import_(['value/.id'], [["foo"]])
self.assertEqual(result['messages'], [
message(u"Invalid database id 'foo' for the field 'unknown'",
moreinfo=moreaction(res_model='ir.model.data',
domain=[('model','=','export.integer')]))
])
self.assertIs(result['ids'], False)
def test_sub_field(self):
""" Does not implicitly create the record, does not warn that you can't
import m2o subfields (at all)...
"""
result = self.import_(['value/value'], [['42']])
self.assertEqual(result['messages'], [
message(u"Can not create Many-To-One records indirectly, import "
u"the field separately")])
self.assertIs(result['ids'], False)
def test_fail_noids(self):
result = self.import_(['value'], [['nameisnoexist:3']])
self.assertEqual(result['messages'], [message(
u"No matching record found for name 'nameisnoexist:3' "
u"in field 'unknown'", moreinfo=moreaction(
res_model='export.integer'))])
self.assertIs(result['ids'], False)
result = self.import_(['value/id'], [['noxidhere']])
self.assertEqual(result['messages'], [message(
u"No matching record found for external id 'noxidhere' "
u"in field 'unknown'", moreinfo=moreaction(
res_model='ir.model.data', domain=[('model','=','export.integer')]))])
self.assertIs(result['ids'], False)
result = self.import_(['value/.id'], [['66']])
self.assertEqual(result['messages'], [message(
u"No matching record found for database id '66' "
u"in field 'unknown'", moreinfo=moreaction(
res_model='ir.model.data', domain=[('model','=','export.integer')]))])
self.assertIs(result['ids'], False)
def test_fail_multiple(self):
result = self.import_(
['value', 'value/id'],
[['somename', 'somexid']])
self.assertEqual(result['messages'], [message(
u"Ambiguous specification for field 'unknown', only provide one of "
u"name, external id or database id")])
self.assertIs(result['ids'], False)
class test_m2m(ImporterCase):
model_name = 'export.many2many'
    # Apparently the one and only format which works is a
    # csv_internal_sep-separated list of ids, xids, or names (depending on
    # whether the column is m2m/.id, m2m/id or m2m[/anything else]).
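    # e.g. a cell "12,13" under a "value/.id" column links the records with
    # database ids 12 and 13 to the row being imported (see test_ids below).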
def test_ids(self):
id1 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 3, 'str': 'record0'})
id2 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 44, 'str': 'record1'})
id3 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 84, 'str': 'record2'})
id4 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 9, 'str': 'record3'})
id5 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 99, 'str': 'record4'})
result = self.import_(['value/.id'], [
['%d,%d' % (id1, id2)],
['%d,%d,%d' % (id1, id3, id4)],
['%d,%d,%d' % (id1, id2, id3)],
['%d' % id5]
])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 4)
ids = lambda records: [record.id for record in records]
b = self.browse()
self.assertEqual(ids(b[0].value), [id1, id2])
self.assertEqual(values(b[0].value), [3, 44])
self.assertEqual(ids(b[2].value), [id1, id2, id3])
self.assertEqual(values(b[2].value), [3, 44, 84])
def test_noids(self):
result = self.import_(['value/.id'], [['42']])
self.assertEqual(result['messages'], [message(
u"No matching record found for database id '42' in field "
u"'unknown'", moreinfo=moreaction(
res_model='ir.model.data', domain=[('model','=','export.many2many.other')]))])
self.assertIs(result['ids'], False)
def test_xids(self):
M2O_o = self.registry('export.many2many.other')
id1 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 3, 'str': 'record0'})
id2 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 44, 'str': 'record1'})
id3 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 84, 'str': 'record2'})
id4 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 9, 'str': 'record3'})
records = M2O_o.browse(self.cr, openerp.SUPERUSER_ID, [id1, id2, id3, id4])
result = self.import_(['value/id'], [
['%s,%s' % (self.xid(records[0]), self.xid(records[1]))],
['%s' % self.xid(records[3])],
['%s,%s' % (self.xid(records[2]), self.xid(records[1]))],
])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 3)
b = self.browse()
self.assertEqual(values(b[0].value), [3, 44])
self.assertEqual(values(b[2].value), [44, 84])
def test_noxids(self):
result = self.import_(['value/id'], [['noxidforthat']])
self.assertEqual(result['messages'], [message(
u"No matching record found for external id 'noxidforthat' in field"
u" 'unknown'", moreinfo=moreaction(
res_model='ir.model.data', domain=[('model','=','export.many2many.other')]))])
self.assertIs(result['ids'], False)
def test_names(self):
M2O_o = self.registry('export.many2many.other')
id1 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 3, 'str': 'record0'})
id2 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 44, 'str': 'record1'})
id3 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 84, 'str': 'record2'})
id4 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 9, 'str': 'record3'})
records = M2O_o.browse(self.cr, openerp.SUPERUSER_ID, [id1, id2, id3, id4])
name = lambda record: record.name_get()[0][1]
result = self.import_(['value'], [
['%s,%s' % (name(records[1]), name(records[2]))],
['%s,%s,%s' % (name(records[0]), name(records[1]), name(records[2]))],
['%s,%s' % (name(records[0]), name(records[3]))],
])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 3)
b = self.browse()
self.assertEqual(values(b[1].value), [3, 44, 84])
self.assertEqual(values(b[2].value), [3, 9])
def test_nonames(self):
result = self.import_(['value'], [['wherethem2mhavenonames']])
self.assertEqual(result['messages'], [message(
u"No matching record found for name 'wherethem2mhavenonames' in "
u"field 'unknown'", moreinfo=moreaction(
res_model='export.many2many.other'))])
self.assertIs(result['ids'], False)
def test_import_to_existing(self):
M2O_o = self.registry('export.many2many.other')
id1 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 3, 'str': 'record0'})
id2 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 44, 'str': 'record1'})
id3 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 84, 'str': 'record2'})
id4 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 9, 'str': 'record3'})
xid = 'myxid'
result = self.import_(['id', 'value/.id'], [[xid, '%d,%d' % (id1, id2)]])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
result = self.import_(['id', 'value/.id'], [[xid, '%d,%d' % (id3, id4)]])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
b = self.browse()
self.assertEqual(len(b), 1)
        # TODO: is replacement of existing m2m values the correct behaviour?
self.assertEqual(values(b[0].value), [84, 9])
class test_o2m(ImporterCase):
model_name = 'export.one2many'
def test_name_get(self):
s = u'Java is a DSL for taking large XML files and converting them ' \
u'to stack traces'
result = self.import_(
['const', 'value'],
[['5', s]])
self.assertEqual(result['messages'], [message(
u"No matching record found for name '%s' in field 'unknown'" % s,
moreinfo=moreaction(res_model='export.one2many.child'))])
self.assertIs(result['ids'], False)
def test_single(self):
result = self.import_(['const', 'value/value'], [
['5', '63']
])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
(b,) = self.browse()
self.assertEqual(b.const, 5)
self.assertEqual(values(b.value), [63])
def test_multicore(self):
result = self.import_(['const', 'value/value'], [
['5', '63'],
['6', '64'],
])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 2)
b1, b2 = self.browse()
self.assertEqual(b1.const, 5)
self.assertEqual(values(b1.value), [63])
self.assertEqual(b2.const, 6)
self.assertEqual(values(b2.value), [64])
def test_multisub(self):
result = self.import_(['const', 'value/value'], [
['5', '63'],
['', '64'],
['', '65'],
['', '66'],
])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
(b,) = self.browse()
self.assertEqual(values(b.value), [63, 64, 65, 66])
def test_multi_subfields(self):
result = self.import_(['value/str', 'const', 'value/value'], [
['this', '5', '63'],
['is', '', '64'],
['the', '', '65'],
['rhythm', '', '66'],
])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
(b,) = self.browse()
self.assertEqual(values(b.value), [63, 64, 65, 66])
self.assertEqual(
values(b.value, 'str'),
'this is the rhythm'.split())
def test_link_inline(self):
""" m2m-style specification for o2ms
"""
id1 = self.registry('export.one2many.child').create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Bf', 'value': 109
})
id2 = self.registry('export.one2many.child').create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Me', 'value': 262
})
result = self.import_(['const', 'value/.id'], [
['42', '%d,%d' % (id1, id2)]
])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
[b] = self.browse()
self.assertEqual(b.const, 42)
# automatically forces link between core record and o2ms
self.assertEqual(values(b.value), [109, 262])
self.assertEqual(values(b.value, field='parent_id'), [b, b])
def test_link(self):
""" O2M relating to an existing record (update) force a LINK_TO as well
"""
O2M = self.registry('export.one2many.child')
id1 = O2M.create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Bf', 'value': 109
})
id2 = O2M.create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Me', 'value': 262
})
result = self.import_(['const', 'value/.id'], [
['42', str(id1)],
['', str(id2)],
])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
[b] = self.browse()
self.assertEqual(b.const, 42)
# automatically forces link between core record and o2ms
self.assertEqual(values(b.value), [109, 262])
self.assertEqual(values(b.value, field='parent_id'), [b, b])
def test_link_2(self):
O2M_c = self.registry('export.one2many.child')
id1 = O2M_c.create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Bf', 'value': 109
})
id2 = O2M_c.create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Me', 'value': 262
})
result = self.import_(['const', 'value/.id', 'value/value'], [
['42', str(id1), '1'],
['', str(id2), '2'],
])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
[b] = self.browse()
self.assertEqual(b.const, 42)
self.assertEqual(values(b.value), [1, 2])
self.assertEqual(values(b.value, field='parent_id'), [b, b])
class test_o2m_multiple(ImporterCase):
model_name = 'export.one2many.multiple'
def test_multi_mixed(self):
result = self.import_(['const', 'child1/value', 'child2/value'], [
['5', '11', '21'],
['', '12', '22'],
['', '13', '23'],
['', '14', ''],
])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
[b] = self.browse()
self.assertEqual(values(b.child1), [11, 12, 13, 14])
self.assertEqual(values(b.child2), [21, 22, 23])
def test_multi(self):
result = self.import_(['const', 'child1/value', 'child2/value'], [
['5', '11', '21'],
['', '12', ''],
['', '13', ''],
['', '14', ''],
['', '', '22'],
['', '', '23'],
])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
[b] = self.browse()
self.assertEqual(values(b.child1), [11, 12, 13, 14])
self.assertEqual(values(b.child2), [21, 22, 23])
def test_multi_fullsplit(self):
result = self.import_(['const', 'child1/value', 'child2/value'], [
['5', '11', ''],
['', '12', ''],
['', '13', ''],
['', '14', ''],
['', '', '21'],
['', '', '22'],
['', '', '23'],
])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
[b] = self.browse()
self.assertEqual(b.const, 5)
self.assertEqual(values(b.child1), [11, 12, 13, 14])
self.assertEqual(values(b.child2), [21, 22, 23])
class test_realworld(common.TransactionCase):
def test_bigfile(self):
data = json.loads(pkgutil.get_data(self.__module__, 'contacts_big.json'))
result = self.registry('res.partner').load(
self.cr, openerp.SUPERUSER_ID,
['name', 'mobile', 'email', 'image'],
data)
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), len(data))
def test_backlink(self):
data = json.loads(pkgutil.get_data(self.__module__, 'contacts.json'))
result = self.registry('res.partner').load(
self.cr, openerp.SUPERUSER_ID,
["name", "type", "street", "city", "country_id", "category_id",
"supplier", "customer", "is_company", "parent_id"],
data)
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), len(data))
def test_recursive_o2m(self):
""" The content of the o2m field's dict needs to go through conversion
as it may be composed of convertables or other relational fields
"""
self.registry('ir.model.data').clear_caches()
Model = self.registry('export.one2many.recursive')
result = Model.load(self.cr, openerp.SUPERUSER_ID,
['value', 'child/const', 'child/child1/str', 'child/child2/value'],
[
['4', '42', 'foo', '55'],
['', '43', 'bar', '56'],
['', '', 'baz', ''],
['', '55', 'qux', '57'],
['5', '99', 'wheee', ''],
['', '98', '', '12'],
],
context=None)
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 2)
b = Model.browse(self.cr, openerp.SUPERUSER_ID, result['ids'], context=None)
self.assertEqual((b[0].value, b[1].value), (4, 5))
self.assertEqual([child.str for child in b[0].child[1].child1],
['bar', 'baz'])
self.assertFalse(len(b[1].child[1].child1))
self.assertEqual([child.value for child in b[1].child[1].child2],
[12])
class test_date(ImporterCase):
model_name = 'export.date'
def test_empty(self):
self.assertEqual(
self.import_(['value'], []),
{'ids': [], 'messages': []})
def test_basic(self):
result = self.import_(['value'], [['2012-02-03']])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
def test_invalid(self):
result = self.import_(['value'], [['not really a date']])
self.assertEqual(result['messages'], [
message(u"'not really a date' does not seem to be a valid date "
u"for field 'unknown'",
moreinfo=u"Use the format '2012-12-31'")])
self.assertIs(result['ids'], False)
class test_datetime(ImporterCase):
model_name = 'export.datetime'
def test_empty(self):
self.assertEqual(
self.import_(['value'], []),
{'ids': [], 'messages': []})
def test_basic(self):
result = self.import_(['value'], [['2012-02-03 11:11:11']])
self.assertFalse(result['messages'])
self.assertEqual(len(result['ids']), 1)
def test_invalid(self):
result = self.import_(['value'], [['not really a datetime']])
self.assertEqual(result['messages'], [
message(u"'not really a datetime' does not seem to be a valid "
u"datetime for field 'unknown'",
moreinfo=u"Use the format '2012-12-31 23:59:59'")])
self.assertIs(result['ids'], False)
def test_checktz1(self):
""" Imported date should be interpreted as being in the tz provided by
the context
"""
# write dummy tz in user (Asia/Hovd UTC+0700), should be superseded by
# context
self.registry('res.users').write(
self.cr, openerp.SUPERUSER_ID, [openerp.SUPERUSER_ID],
{'tz': 'Asia/Hovd'})
# UTC+1400
result = self.import_(
['value'], [['2012-02-03 11:11:11']], {'tz': 'Pacific/Kiritimati'})
self.assertFalse(result['messages'])
self.assertEqual(
values(self.read(domain=[('id', 'in', result['ids'])])),
['2012-02-02 21:11:11'])
# UTC-0930
result = self.import_(
['value'], [['2012-02-03 11:11:11']], {'tz': 'Pacific/Marquesas'})
self.assertFalse(result['messages'])
self.assertEqual(
values(self.read(domain=[('id', 'in', result['ids'])])),
['2012-02-03 20:41:11'])
def test_usertz(self):
""" If the context does not hold a timezone, the importing user's tz
should be used
"""
# UTC +1000
self.registry('res.users').write(
self.cr, openerp.SUPERUSER_ID, [openerp.SUPERUSER_ID],
{'tz': 'Asia/Yakutsk'})
result = self.import_(
['value'], [['2012-02-03 11:11:11']])
self.assertFalse(result['messages'])
self.assertEqual(
values(self.read(domain=[('id', 'in', result['ids'])])),
['2012-02-03 01:11:11'])
def test_notz(self):
""" If there is no tz either in the context or on the user, falls back
to UTC
"""
self.registry('res.users').write(
self.cr, openerp.SUPERUSER_ID, [openerp.SUPERUSER_ID],
{'tz': False})
result = self.import_(['value'], [['2012-02-03 11:11:11']])
self.assertFalse(result['messages'])
self.assertEqual(
values(self.read(domain=[('id', 'in', result['ids'])])),
['2012-02-03 11:11:11'])
class test_unique(ImporterCase):
model_name = 'export.unique'
@mute_logger('openerp.sql_db')
def test_unique(self):
result = self.import_(['value'], [
['1'],
['1'],
['2'],
['3'],
['3'],
])
self.assertFalse(result['ids'])
self.assertEqual(result['messages'], [
dict(message=u"The value for the field 'value' already exists. "
u"This might be 'unknown' in the current model, "
u"or a field of the same name in an o2m.",
type='error', rows={'from': 1, 'to': 1},
record=1, field='value'),
dict(message=u"The value for the field 'value' already exists. "
u"This might be 'unknown' in the current model, "
u"or a field of the same name in an o2m.",
type='error', rows={'from': 4, 'to': 4},
record=4, field='value'),
])
|
agpl-3.0
|
dcsquared13/Diamond
|
src/collectors/docker_collector/test/testdocker_collector.py
|
15
|
5984
|
#!/usr/bin/python
# coding=utf-8
##########################################################################
import os
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from test import run_only
from mock import Mock
from mock import patch
from mock import mock_open
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
from docker import Client
except ImportError:
Client = None
from diamond.collector import Collector
from docker_collector import DockerCollector
dirname = os.path.dirname(__file__)
fixtures_path = os.path.join(dirname, 'fixtures/')
fixtures = []
for root, dirnames, filenames in os.walk(fixtures_path):
fixtures.append([root, dirnames, filenames])
docker_fixture = [
{u'Id': u'c3341726a9b4235a35b390c5f6f28e5a6869879a48da1d609db8f6bf4275bdc5',
u'Names': [u'/testcontainer']},
{u'Id': u'0aec7f643ca1cb45f54d41dcabd8fcbcfcbc57170c3e6dd439af1a52761c2bed',
u'Names': [u'/testcontainer3']},
{u'Id': u'9c151939e20682b924d7299875e94a4aabbe946b30b407f89e276507432c625b',
u'Names': None}]
def run_only_if_docker_client_is_available(func):
try:
from docker import Client
except ImportError:
Client = None
pred = lambda: Client is not None
return run_only(func, pred)
class TestDockerCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('DockerCollector', {
'interval': 10,
'byte_unit': 'megabyte',
'memory_path': fixtures_path,
})
self.collector = DockerCollector(config, None)
def test_import(self):
self.assertTrue(DockerCollector)
def test_finds_linux_v2_memory_stat_path(self):
tid = 'c3341726a9b4235a35b390c5f6f28e5a6869879a48da1d609db8f6bf4275bdc5'
path = self.collector._memory_stat_path(tid)
self.assertTrue(path is not None)
self.assertTrue(os.path.exists(path))
def test_finds_linux_v3_memory_stat_path(self):
tid = '0aec7f643ca1cb45f54d41dcabd8fcbcfcbc57170c3e6dd439af1a52761c2bed'
path = self.collector._memory_stat_path(tid)
self.assertTrue(path is not None)
self.assertTrue(os.path.exists(path))
def test_doesnt_find_bogus_memory_stat_path(self):
tid = 'testcontainer'
path = self.collector._memory_stat_path(tid)
self.assertTrue(path is None)
@patch('os.path.exists', Mock(return_value=True))
def test_default_memory_path(self):
read_data = "\n".join([
'none /selinux selinuxfs rw,relatime 0 0',
'cgroup /goofy/memory cgroup'
' rw,nosuid,nodev,noexec,relatime,devices 0 0',
'cgroup /mickeymouse/memory cgroup'
' rw,nosuid,nodev,noexec,relatime,memory 0 0',
'tmpfs /dev/shm tmpfs rw,seclabel,nosuid,nodev 0 0',
'',
])
m = mock_open(read_data=read_data)
with patch('__builtin__.open', m, create=True):
self.assertEqual(self.collector._default_memory_path(),
'/mickeymouse/memory')
m.assert_called_once_with('/proc/mounts')
# @run_only_if_docker_client_is_available
# @patch.object(Collector, 'publish')
# @patch.object(Client, 'containers', Mock(return_value=[]))
# @patch.object(Client, 'images', Mock(return_value=[]))
# def test_collect_sunny_day(self, publish_mock):
# self.assertTrue(self.collector.collect())
# self.assertPublishedMany(publish_mock, {
# 'containers_running_count': 100,
# 'containers_stopped_count': 100,
# 'images_count': 100,
# 'images_dangling_count': 100,
# })
# @run_only_if_docker_client_is_available
# @patch('__builtin__.open')
# @patch.object(Client, 'containers', Mock(return_value=[]))
# @patch.object(Collector, 'publish')
# def test_should_open_memory_stat(self, publish_mock, open_mock):
# # open_mock.side_effect = lambda x: StringIO('')
# self.collector.collect()
# print open_mock.mock_calls
# open_mock.assert_any_call(fixtures_path +
# 'docker/c3341726a9b4235a35b'
# '390c5f6f28e5a6869879a48da1d609db8f6bf4275bdc5/memory.stat')
# # open_mock.assert_any_call(fixtures_path +
# 'lxc/testcontainer/memory.stat')
# # open_mock.assert_any_call(fixtures_path + 'lxc/memory.stat')
# # open_mock.assert_any_call(fixtures_path + 'memory.stat')
# @run_only_if_docker_client_is_available
# @patch('__builtin__.open')
# @patch.object(Client, 'containers')
# @patch.object(Collector, 'publish')
# def test_should_get_containers(self, publish_mock, containers_mock,
# open_mock):
# containers_mock.return_value = []
# open_mock.side_effect = lambda x: StringIO('')
# self.collector.collect()
# containers_mock.assert_any_call(all=True)
# @run_only_if_docker_client_is_available
# @patch.object(Collector, 'publish')
# @patch.object(Client, 'containers',
# Mock(return_value=docker_fixture))
# def test_should_work_with_real_data(self, publish_mock):
# self.collector.collect()
# self.assertPublishedMany(publish_mock, {
# 'lxc.testcontainer.cache': 1,
# 'lxc.testcontainer.rss': 1,
# 'lxc.testcontainer.swap': 1,
# 'lxc.cache': 1,
# 'lxc.rss': 1,
# 'lxc.swap': 1,
# 'system.cache': 1,
# 'system.rss': 1,
# 'system.swap': 1,
# 'docker.testcontainer.cache': 1,
# 'docker.testcontainer.rss': 1,
# 'docker.testcontainer.swap': 1,
# 'docker.cache': 1,
# 'docker.rss': 1,
# 'docker.swap': 1,
# })
if __name__ == "__main__":
unittest.main()
|
mit
|
Seanmcn/poker
|
poker/hand.py
|
1
|
31782
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import, division, print_function
import re
import random
import itertools
import functools
from decimal import Decimal
from cached_property import cached_property
from ._common import PokerEnum, _ReprMixin
from .card import Suit, Rank, Card, BROADWAY_RANKS
__all__ = ['Shape', 'Hand', 'Combo', 'Range', 'PAIR_HANDS', 'OFFSUIT_HANDS', 'SUITED_HANDS']
# All the possible suit combinations are pregenerated, so we don't have to
# compute them every time
_PAIR_SUIT_COMBINATIONS = ('cd', 'ch', 'cs', 'dh', 'ds', 'hs')
_OFFSUIT_SUIT_COMBINATIONS = ('cd', 'ch', 'cs', 'dc', 'dh', 'ds',
'hc', 'hd', 'hs', 'sc', 'sd', 'sh')
_SUITED_SUIT_COMBINATIONS = ('cc', 'dd', 'hh', 'ss')
class Shape(PokerEnum):
OFFSUIT = 'o', 'offsuit', 'off'
SUITED = 's', 'suited'
PAIR = '',
class _HandMeta(type):
"""Makes Hand class iterable. __iter__ goes through all hands in ascending order."""
def __new__(metacls, clsname, bases, classdict):
"""Cache all possible Hand instances on the class itself."""
cls = super(_HandMeta, metacls).__new__(metacls, clsname, bases, classdict)
cls._all_hands = tuple(cls._get_non_pairs()) + tuple(cls._get_pairs())
return cls
def _get_non_pairs(cls):
for rank1 in Rank:
for rank2 in (r for r in Rank if r < rank1):
yield cls('{}{}o'.format(rank1, rank2))
yield cls('{}{}s'.format(rank1, rank2))
def _get_pairs(cls):
for rank in Rank:
yield cls(rank.val * 2)
def __iter__(cls):
return iter(cls._all_hands)
def make_random(cls):
self = object.__new__(cls)
first = Rank.make_random()
second = Rank.make_random()
self._set_ranks_in_order(first, second)
if first == second:
self._shape = ''
else:
self._shape = random.choice(['s', 'o'])
return self
@functools.total_ordering
class Hand(_ReprMixin):
"""General hand without a precise suit. Only knows about two ranks and shape."""
__metaclass__ = _HandMeta
__slots__ = ('first', 'second', '_shape')
def __new__(cls, hand):
if isinstance(hand, cls):
return hand
if len(hand) not in (2, 3):
raise ValueError('Length should be 2 (pair) or 3 (hand)')
first, second = hand[:2]
self = object.__new__(cls)
if len(hand) == 2:
if first != second:
raise ValueError('%r, Not a pair! Maybe you need to specify a suit?' % hand)
self._shape = ''
elif len(hand) == 3:
shape = hand[2].lower()
if first == second:
raise ValueError("{!r}; pairs can't have a suit: {!r}".format(hand, shape))
if shape not in ('s', 'o'):
raise ValueError('{!r}; Invalid shape: {!r}'.format(hand, shape))
self._shape = shape
self._set_ranks_in_order(first, second)
return self
def __unicode__(self):
return '{}{}{}'.format(self.first, self.second, self.shape)
def __hash__(self):
return hash(self.first) + hash(self.second) + hash(self.shape)
def __getstate__(self):
return {'first': self.first, 'second': self.second, '_shape': self._shape}
def __setstate__(self, state):
self.first, self.second, self._shape = state['first'], state['second'], state['_shape']
def __eq__(self, other):
if self.__class__ is not other.__class__:
return NotImplemented
# AKs != AKo, because AKs is better
return (self.first == other.first and
self.second == other.second and
self.shape.val == other.shape.val)
def __lt__(self, other):
if self.__class__ is not other.__class__:
return NotImplemented
# pairs are better than non-pairs
if not self.is_pair and other.is_pair:
return True
elif self.is_pair and not other.is_pair:
return False
elif (not self.is_pair and not other.is_pair and
self.first == other.first and self.second == other.second
and self._shape != other._shape):
# when Rank match, only suit is the deciding factor
# so, offsuit hand is 'less' than suited
return self._shape == 'o'
elif self.first == other.first:
return self.second < other.second
else:
return self.first < other.first
def _set_ranks_in_order(self, first, second):
# set as Rank objects.
self.first, self.second = Rank(first), Rank(second)
if self.first < self.second:
self.first, self.second = self.second, self.first
def to_combos(self):
first, second = self.first.val, self.second.val
if self.is_pair:
return tuple(Combo(first + s1 + first + s2) for s1, s2 in _PAIR_SUIT_COMBINATIONS)
elif self.is_offsuit:
return tuple(Combo(first + s1 + second + s2) for s1, s2 in _OFFSUIT_SUIT_COMBINATIONS)
else:
return tuple(Combo(first + s1 + second + s2) for s1, s2 in _SUITED_SUIT_COMBINATIONS)
@property
def is_suited_connector(self):
return self.is_suited and self.is_connector
@property
def is_suited(self):
return self._shape == 's'
@property
def is_offsuit(self):
return self._shape == 'o'
@property
def is_connector(self):
return self.rank_difference == 1
@property
def is_one_gapper(self):
return self.rank_difference == 2
@property
def is_two_gapper(self):
return self.rank_difference == 3
@property
def rank_difference(self):
"""The difference between the first and second rank of the Hand."""
# self.first >= self.second
return Rank.difference(self.first, self.second)
@property
def is_broadway(self):
return (self.first in BROADWAY_RANKS and self.second in BROADWAY_RANKS)
@property
def is_pair(self):
return self.first == self.second
@property
def shape(self):
return Shape(self._shape)
@shape.setter
def shape(self, value):
self._shape = Shape(value).val
PAIR_HANDS = tuple(hand for hand in Hand if hand.is_pair)
"""Tuple of all pair hands in ascending order."""
OFFSUIT_HANDS = tuple(hand for hand in Hand if hand.is_offsuit)
"""Tuple of offsuit hands in ascending order."""
SUITED_HANDS = tuple(hand for hand in Hand if hand.is_suited)
"""Tuple of suited hands in ascending order."""
@functools.total_ordering
class Combo(_ReprMixin):
"""Hand combination."""
__slots__ = ('first', 'second')
def __new__(cls, combo):
if isinstance(combo, Combo):
return combo
if len(combo) != 4:
raise ValueError('%r, should have a length of 4' % combo)
elif (combo[0] == combo[2] and combo[1] == combo[3]):
raise ValueError("{!r}, Pair can't have the same suit: {!r}".format(combo, combo[1]))
self = super(Combo, cls).__new__(cls)
self._set_cards_in_order(combo[:2], combo[2:])
return self
@classmethod
def from_cards(cls, first, second):
self = super(Combo, cls).__new__(cls)
first = first.rank.val + first.suit.val
second = second.rank.val + second.suit.val
self._set_cards_in_order(first, second)
return self
def __unicode__(self):
return '{}{}'.format(self.first, self.second)
def __hash__(self):
return hash(self.first) + hash(self.second)
def __getstate__(self):
return {'first': self.first, 'second': self.second}
def __setstate__(self, state):
self.first, self.second = state['first'], state['second']
def __eq__(self, other):
if self.__class__ is other.__class__:
return self.first == other.first and self.second == other.second
return NotImplemented
def __lt__(self, other):
if self.__class__ is not other.__class__:
return NotImplemented
# lookup optimization
self_is_pair, other_is_pair = self.is_pair, other.is_pair
self_first, other_first = self.first, other.first
if self_is_pair and other_is_pair:
if self_first == other_first:
return self.second < other.second
return self_first < other_first
elif self_is_pair or other_is_pair:
# Pairs are better than non-pairs
return self_is_pair < other_is_pair
else:
            if self_first.rank == other_first.rank:
                if self.second.rank == other.second.rank:
                    # same ranks: when exactly one combo is suited, offsuit
                    # sorts below suited (two suited combos compare not-less)
                    if self.is_suited or other.is_suited:
                        return self.is_suited < other.is_suited
                    # neither is suited: fall back to the first card's suit
                    return self_first.suit < other_first.suit
return self.second < other.second
return self_first < other_first
def _set_cards_in_order(self, first, second):
self.first, self.second = Card(first), Card(second)
if self.first < self.second:
self.first, self.second = self.second, self.first
def to_hand(self):
"""Convert combo to :class:`Hand` object, losing suit information."""
return Hand('{}{}{}'.format(self.first.rank, self.second.rank, self.shape))
@property
def is_suited_connector(self):
return self.is_suited and self.is_connector
@property
def is_suited(self):
return self.first.suit == self.second.suit
@property
def is_offsuit(self):
return not self.is_suited and not self.is_pair
@property
def is_connector(self):
return self.rank_difference == 1
@property
def is_one_gapper(self):
return self.rank_difference == 2
@property
def is_two_gapper(self):
return self.rank_difference == 3
@property
def rank_difference(self):
"""The difference between the first and second rank of the Combo."""
# self.first >= self.second
return Rank.difference(self.first.rank, self.second.rank)
@property
def is_pair(self):
return self.first.rank == self.second.rank
@property
def is_broadway(self):
return self.first.is_broadway and self.second.is_broadway
@property
def shape(self):
if self.is_pair:
return Shape.PAIR
elif self.is_suited:
return Shape.SUITED
else:
return Shape.OFFSUIT
    @shape.setter
    def shape(self, value):
        # NOTE: broken as written: '_shape' is not in Combo.__slots__, so this
        # assignment raises AttributeError; a Combo's shape is derived from
        # its two cards anyway.
        self._shape = Shape(value).val
class _RegexRangeLexer(object):
_separator_re = re.compile(r"[, ;\n]")
_rank = r"([2-9TJQKA])"
_suit = r"[cdhs♣♦♥♠]"
# the second card is not the same as the first
# (negative lookahead for the first matching group)
# this will not match pairs, but will match e.g. 86 or AK
_nonpair1 = r"{0}(?!\1){0}".format(_rank)
_nonpair2 = r"{0}(?!\2){0}".format(_rank)
rules = (
# NAME, REGEX, value extractor METHOD NAME
('ALL', 'XX', '_get_value'),
('PAIR', r"{}\1$".format(_rank), '_get_first'),
('PAIR_PLUS', r"{}\1\+$".format(_rank), '_get_first'),
('PAIR_MINUS', r"{}\1-$".format(_rank), '_get_first'),
('PAIR_DASH', r"{0}\1-{0}\2$".format(_rank), '_get_for_pair_dash'),
('BOTH', _nonpair1 + r"$", '_get_first_two'),
('BOTH_PLUS', r"{}\+$".format(_nonpair1), '_get_first_two'),
('BOTH_MINUS', r"{}-$".format(_nonpair1), '_get_first_two'),
('BOTH_DASH', r"{}-{}$".format(_nonpair1, _nonpair2), '_get_for_both_dash'),
('SUITED', r"{}s$".format(_nonpair1), '_get_first_two'),
('SUITED_PLUS', r"{}s\+$".format(_nonpair1), '_get_first_two'),
('SUITED_MINUS', r"{}s-$".format(_nonpair1), '_get_first_two'),
('SUITED_DASH', r"{}s-{}s$".format(_nonpair1, _nonpair2), '_get_for_shaped_dash'),
('OFFSUIT', r"{}o$".format(_nonpair1), '_get_first_two'),
('OFFSUIT_PLUS', r"{}o\+$".format(_nonpair1), '_get_first_two'),
('OFFSUIT_MINUS', r"{}o-$".format(_nonpair1), '_get_first_two'),
('OFFSUIT_DASH', r"{}o-{}o$".format(_nonpair1, _nonpair2), '_get_for_shaped_dash'),
('X_SUITED', r"{0}Xs$|X{0}s$".format(_rank), '_get_rank'),
('X_SUITED_PLUS', r"{0}Xs\+$|X{0}s\+$".format(_rank), '_get_rank'),
('X_SUITED_MINUS', r"{0}Xs-$|X{0}s-$".format(_rank), '_get_rank'),
('X_OFFSUIT', r"{0}Xo$|X{0}o$".format(_rank), '_get_rank'),
('X_OFFSUIT_PLUS', r"{0}Xo\+$|X{0}o\+$".format(_rank), '_get_rank'),
('X_OFFSUIT_MINUS', r"{0}Xo-$|X{0}o-$".format(_rank), '_get_rank'),
('X_PLUS', r"{0}X\+$|X{0}\+$".format(_rank), '_get_rank'),
('X_MINUS', r"{0}X-$|X{0}-$".format(_rank), '_get_rank'),
('X_BOTH', r"{0}X$|X{0}$".format(_rank), '_get_rank'),
# might be anything, even pair
# FIXME: 5s5s accepted
('COMBO', r"{0}{1}{0}{1}$".format(_rank, _suit), '_get_value'),
)
# compile regexes when initializing class, so every instance will have them precompiled
rules = [(name, re.compile(regex, re.IGNORECASE), method) for (name, regex, method) in rules]
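# Illustrative tokenization (yields (token, extracted_value) pairs): the range
# string 'A5s+ 66-22 KQo' splits into three parts which lex to roughly
#   ('SUITED_PLUS', ('5', 'A')), ('PAIR_DASH', ('2', '6')), ('OFFSUIT', ('Q', 'K'))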
def __init__(self, range=''):
# filter out empty matches
self.parts = [part for part in self._separator_re.split(range) if part]
def __iter__(self):
"""Goes through all the parts and compare them with the regex rules. If it finds a match,
makes an appropriate value for the token and yields them.
"""
for part in self.parts:
for token, regex, method_name in self.rules:
if regex.match(part):
val_method = getattr(self, method_name)
yield token, val_method(part)
break
else:
raise ValueError('Invalid token: %s' % part)
@staticmethod
def _get_value(token):
return token
@staticmethod
def _get_first(token):
return token[0]
@staticmethod
def _get_rank(token):
return token[0] if token[1].upper() == 'X' else token[1]
@classmethod
def _get_in_order(cls, first_part, second_part, token):
smaller, bigger = cls._get_rank_in_order(token, first_part, second_part)
return smaller.val, bigger.val
@classmethod
def _get_first_two(cls, token):
return cls._get_in_order(0, 1, token)
@classmethod
def _get_for_pair_dash(cls, token):
return cls._get_in_order(0, 3, token)
@classmethod
def _get_first_smaller_bigger(cls, first_part, second_part, token):
smaller1, bigger1 = cls._get_rank_in_order(token[first_part], 0, 1)
smaller2, bigger2 = cls._get_rank_in_order(token[second_part], 0, 1)
if bigger1 != bigger2:
raise ValueError('Invalid token: %s' % token)
smaller, bigger = min(smaller1, smaller2), max(smaller1, smaller2)
return bigger1.val, smaller.val, bigger.val
@staticmethod
def _get_rank_in_order(token, first_part, second_part):
first, second = Rank(token[first_part]), Rank(token[second_part])
smaller, bigger = min(first, second), max(first, second)
return smaller, bigger
# for 'A5-AT'
@classmethod
def _get_for_both_dash(cls, token):
return cls._get_first_smaller_bigger(slice(0, 2), slice(3, 5), token)
# for 'A5o-ATo' and 'A5s-ATs'
@classmethod
def _get_for_shaped_dash(cls, token):
return cls._get_first_smaller_bigger(slice(0, 2), slice(4, 6), token)
@functools.total_ordering
class Range(object):
"""Parses a str range into tuple of Combos (or Hands)."""
slots = ('_hands', '_combos')
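# NOTE: written as `slots`, not `__slots__`, so this is an inert class attribute;
# a real __slots__ here would break the cached_property attributes below, which
# need an instance __dict__ to cache into.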
def __init__(self, range=''):
self._hands = set()
self._combos = set()
for token, value in _RegexRangeLexer(range):
if token == 'ALL':
for card in itertools.combinations('AKQJT98765432', 2):
self._add_offsuit(card)
self._add_suited(card)
for rank in 'AKQJT98765432':
self._add_pair(rank)
# full range, no need to parse any more token
break
elif token == 'PAIR':
self._add_pair(value)
elif token == 'PAIR_PLUS':
smallest = Rank(value)
for rank in (rank.val for rank in Rank if rank >= smallest):
self._add_pair(rank)
elif token == 'PAIR_MINUS':
biggest = Rank(value)
for rank in (rank.val for rank in Rank if rank <= biggest):
self._add_pair(rank)
elif token == 'PAIR_DASH':
first, second = Rank(value[0]), Rank(value[1])
ranks = (rank.val for rank in Rank if first <= rank <= second)
for rank in ranks:
self._add_pair(rank)
elif token == 'BOTH':
self._add_offsuit(value[0] + value[1])
self._add_suited(value[0] + value[1])
elif token == 'X_BOTH':
for rank in (r.val for r in Rank if r < Rank(value)):
self._add_suited(value + rank)
self._add_offsuit(value + rank)
elif token == 'OFFSUIT':
self._add_offsuit(value[0] + value[1])
elif token == 'SUITED':
self._add_suited(value[0] + value[1])
elif token == 'X_OFFSUIT':
biggest = Rank(value)
for rank in (rank.val for rank in Rank if rank < biggest):
self._add_offsuit(value + rank)
elif token == 'X_SUITED':
biggest = Rank(value)
for rank in (rank.val for rank in Rank if rank < biggest):
self._add_suited(value + rank)
elif token == 'BOTH_PLUS':
smaller, bigger = Rank(value[0]), Rank(value[1])
for rank in (rank.val for rank in Rank if smaller <= rank < bigger):
self._add_suited(value[1] + rank)
self._add_offsuit(value[1] + rank)
elif token == 'BOTH_MINUS':
smaller, bigger = Rank(value[0]), Rank(value[1])
for rank in (rank.val for rank in Rank if rank <= smaller):
self._add_suited(value[1] + rank)
self._add_offsuit(value[1] + rank)
elif token in ('X_PLUS', 'X_SUITED_PLUS', 'X_OFFSUIT_PLUS'):
smallest = Rank(value)
first_ranks = (rank for rank in Rank if rank >= smallest)
for rank1 in first_ranks:
second_ranks = (rank for rank in Rank if rank < rank1)
for rank2 in second_ranks:
if token != 'X_OFFSUIT_PLUS':
self._add_suited(rank1.val + rank2.val)
if token != 'X_SUITED_PLUS':
self._add_offsuit(rank1.val + rank2.val)
elif token in ('X_MINUS', 'X_SUITED_MINUS', 'X_OFFSUIT_MINUS'):
biggest = Rank(value)
first_ranks = (rank for rank in Rank if rank <= biggest)
for rank1 in first_ranks:
second_ranks = (rank for rank in Rank if rank < rank1)
for rank2 in second_ranks:
if token != 'X_OFFSUIT_MINUS':
self._add_suited(rank1.val + rank2.val)
if token != 'X_SUITED_MINUS':
self._add_offsuit(rank1.val + rank2.val)
elif token == 'COMBO':
self._combos.add(Combo(value))
elif token == 'OFFSUIT_PLUS':
smaller, bigger = Rank(value[0]), Rank(value[1])
for rank in (rank.val for rank in Rank if smaller <= rank < bigger):
self._add_offsuit(value[1] + rank)
elif token == 'OFFSUIT_MINUS':
smaller, bigger = Rank(value[0]), Rank(value[1])
for rank in (rank.val for rank in Rank if rank <= smaller):
self._add_offsuit(value[1] + rank)
elif token == 'SUITED_PLUS':
smaller, bigger = Rank(value[0]), Rank(value[1])
for rank in (rank.val for rank in Rank if smaller <= rank < bigger):
self._add_suited(value[1] + rank)
elif token == 'SUITED_MINUS':
smaller, bigger = Rank(value[0]), Rank(value[1])
for rank in (rank.val for rank in Rank if rank <= smaller):
self._add_suited(value[1] + rank)
elif token == 'BOTH_DASH':
smaller, bigger = Rank(value[1]), Rank(value[2])
for rank in (rank.val for rank in Rank if smaller <= rank <= bigger):
self._add_offsuit(value[0] + rank)
self._add_suited(value[0] + rank)
elif token == 'OFFSUIT_DASH':
smaller, bigger = Rank(value[1]), Rank(value[2])
for rank in (rank.val for rank in Rank if smaller <= rank <= bigger):
self._add_offsuit(value[0] + rank)
elif token == 'SUITED_DASH':
smaller, bigger = Rank(value[1]), Rank(value[2])
for rank in (rank.val for rank in Rank if smaller <= rank <= bigger):
self._add_suited(value[0] + rank)
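# Illustrative examples of the grammar handled above:
#   Range('XX')   -> the full 1326-combo range
#   Range('22+')  -> every pair from 22 up to AA
#   Range('A2s+') -> A2s through AKs (all the suited aces)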
@classmethod
def from_file(cls, filename):
"""Creates an instance from a given file, containing a range.
It can handle the PokerCruncher (.rng extension) format.
"""
with Path(filename).open() as f:
    range_string = f.read()
return cls(range_string)
@classmethod
def from_objects(cls, iterable):
"""Make an instance from an iterable of Combos, Hands or both."""
range_string = ' '.join(unicode(obj) for obj in iterable)
return cls(range_string)
def __eq__(self, other):
if self.__class__ is other.__class__:
return self._all_combos == other._all_combos
return NotImplemented
def __lt__(self, other):
if self.__class__ is other.__class__:
return len(self._all_combos) < len(other._all_combos)
return NotImplemented
def __contains__(self, item):
if isinstance(item, Combo):
return item in self._combos or item.to_hand() in self._hands
elif isinstance(item, Hand):
return item in self._all_hands
elif isinstance(item, unicode):
if len(item) == 4:
combo = Combo(item)
return combo in self._combos or combo.to_hand() in self._hands
else:
return Hand(item) in self._all_hands
def __len__(self):
return self._count_combos()
def __unicode__(self):
return ', '.join(self.rep_pieces)
def __str__(self):
return unicode(self).encode('utf-8')
def __repr__(self):
range = ' '.join(self.rep_pieces)
return "{}('{}')".format(self.__class__.__name__, range).encode('utf-8')
def __getstate__(self):
return {'_hands': self._hands, '_combos': self._combos}
def __setstate__(self, state):
self._hands, self._combos = state['_hands'], state['_combos']
def __hash__(self):
return hash(self.combos)
def to_html(self):
"""Returns a 13x13 HTML table representing the range.
The table's CSS class is ``range``; pair cells (td elements) have the ``pair`` class,
offsuit hands have ``offsuit`` and suited hands have ``suited``.
The HTML contains no extra whitespace at all.
Calculating it should not take more than 30 ms, even for a 100% range.
"""
# note about speed: I tried with functools.lru_cache, and the initial call was 3-4x slower
# than without it, and the need for calling this will usually be once, so no need to cache
html = ['<table class="range">']
for row in reversed(Rank):
html.append('<tr>')
for col in reversed(Rank):
if row > col:
suit, cssclass = 's', 'suited'
elif row < col:
suit, cssclass = 'o', 'offsuit'
else:
suit, cssclass = '', 'pair'
html.append('<td class="%s">' % cssclass)
hand = Hand(row.val + col.val + suit)
if hand in self.hands:
html.append(unicode(hand))
html.append('</td>')
html.append('</tr>')
html.append('</table>')
return ''.join(html)
def to_ascii(self, border=False):
"""Returns a nicely formatted ASCII table with optional borders."""
table = []
if border:
table.append('┌' + '─────┬' * 12 + '─────┐\n')
line = '├' + '─────┼' * 12 + '─────┤\n'
border = '│ '
lastline = '\n└' + '─────┴' * 12 + '─────┘'
else:
line = border = lastline = ''
for row in reversed(Rank):
for col in reversed(Rank):
if row > col:
suit = 's'
elif row < col:
suit = 'o'
else:
suit = ''
hand = Hand(row.val + col.val + suit)
hand = unicode(hand) if hand in self.hands else ''
table.append(border)
table.append(hand.ljust(4))
if row.val != '2':
table.append(border)
table.append('\n')
table.append(line)
table.append(border)
table.append(lastline)
return ''.join(table)
@property
def rep_pieces(self):
"""List of str pieces how the Range is represented."""
if self._count_combos() == 1326:
return ['XX']
all_combos = self._all_combos
pairs = list(filter(lambda c: c.is_pair, all_combos))
pair_pieces = self._get_pieces(pairs, 6)
suiteds = list(filter(lambda c: c.is_suited, all_combos))
suited_pieces = self._get_pieces(suiteds, 4)
offsuits = list(filter(lambda c: c.is_offsuit, all_combos))
offsuit_pieces = self._get_pieces(offsuits, 12)
pair_strs = self._shorten_pieces(pair_pieces)
suited_strs = self._shorten_pieces(suited_pieces)
offsuit_strs = self._shorten_pieces(offsuit_pieces)
return pair_strs + suited_strs + offsuit_strs
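# _get_pieces collapses runs of combos back into Hand objects: when every combo
# of a hand is present (6 for a pair, 4 for suited, 12 for offsuit), the group is
# replaced by the Hand; incomplete groups stay as individual Combos.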
def _get_pieces(self, combos, combos_in_hand):
if not combos:
return []
sorted_combos = sorted(combos, reverse=True)
hands_and_combos = []
current_combos = []
last_combo = sorted_combos[0]
for combo in sorted_combos:
if (last_combo.first.rank == combo.first.rank and
last_combo.second.rank == combo.second.rank):
current_combos.append(combo)
length = len(current_combos)
if length == combos_in_hand:
hands_and_combos.append(combo.to_hand())
current_combos = []
else:
hands_and_combos.extend(current_combos)
current_combos = [combo]
last_combo = combo
# add the remainder if any, current_combos might be empty
hands_and_combos.extend(current_combos)
return hands_and_combos
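# _shorten_pieces merges consecutive hands (walked in descending order) into
# compact notation via _get_format, e.g. a run of pairs reaching AA becomes 'QQ+'
# and a bounded run becomes '88-55'; Combos are always written out individually.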
def _shorten_pieces(self, pieces):
if not pieces:
return []
str_pieces = []
first = last = pieces[0]
for current in pieces[1:]:
if isinstance(last, Combo):
str_pieces.append(unicode(last))
first = last = current
elif isinstance(current, Combo):
str_pieces.append(self._get_format(first, last))
first = last = current
elif ((current.is_pair and Rank.difference(last.first, current.first) == 1) or
(last.first == current.first and
Rank.difference(last.second, current.second) == 1)):
last = current
else:
str_pieces.append(self._get_format(first, last))
first = last = current
# write out any remaining pieces
str_pieces.append(self._get_format(first, last))
return str_pieces
def _get_format(self, first, last):
if first == last:
return unicode(first)
elif (first.is_pair and first.first.val == 'A' or
Rank.difference(first.first, first.second) == 1):
return '%s+' % last
elif last.second.val == '2':
return '%s-' % first
else:
return '{}-{}'.format(first, last)
def _add_pair(self, rank):
self._hands.add(Hand(rank * 2))
def _add_offsuit(self, tok):
self._hands.add(Hand(tok[0] + tok[1] + 'o'))
def _add_suited(self, tok):
self._hands.add(Hand(tok[0] + tok[1] + 's'))
@cached_property
def hands(self):
"""Tuple of hands contained in this range. If only one combo of the same hand is present,
it will be shown here. e.g. ``Range('2s2c').hands == (Hand('22'),)``
"""
return tuple(sorted(self._all_hands))
@cached_property
def combos(self):
return tuple(sorted(self._all_combos))
@cached_property
def percent(self):
"""What percent of combos does this range have compared to all the possible combos.
There are 1326 total combos in Hold'em: 52 * 51 / 2 (because order doesn't matter)
Precision: 2 decimal places
"""
dec_percent = (Decimal(self._count_combos()) / 1326 * 100)
# round to two decimal places
return float(dec_percent.quantize(Decimal('1.00')))
def _count_combos(self):
combo_count = len(self._combos)
for hand in self._hands:
if hand.is_pair:
combo_count += 6
elif hand.is_offsuit:
combo_count += 12
elif hand.is_suited:
combo_count += 4
return combo_count
@cached_property
def _all_combos(self):
hand_combos = {combo for hand in self._hands for combo in hand.to_combos()}
return hand_combos | self._combos
@cached_property
def _all_hands(self):
combo_hands = {combo.to_hand() for combo in self._combos}
return combo_hands | self._hands
if __name__ == '__main__':
import cProfile
print('_all_COMBOS')
cProfile.run("Range('XX')._all_combos", sort='tottime')
print('COMBOS')
cProfile.run("Range('XX').combos", sort='tottime')
print('HANDS')
cProfile.run("Range('XX').hands", sort='tottime')
r = ('KK-QQ, 88-77, A5s, A3s, K8s+, K3s, Q7s+, Q5s, Q3s, J9s-J5s, T4s+, 97s, 95s-93s, 87s, '
'85s-84s, 75s, 64s-63s, 53s, ATo+, K5o+, Q7o-Q5o, J9o-J7o, J4o-J3o, T8o-T3o, 96o+, '
'94o-93o, 86o+, 84o-83o, 76o, 74o, 63o, 54o, 22')
print('R _all_COMBOS')
cProfile.run("Range('%s')._all_combos" % r, sort='tottime')
print('R COMBOS')
cProfile.run("Range('%s').combos" % r, sort='tottime')
print('R HANDS')
cProfile.run("Range('%s').hands" % r, sort='tottime')
|
mit
|
vinodbonala/mm
|
mm/exceptions.py
|
4
|
1512
|
class MMException(Exception):
"""Base mm exception"""
pass
class MetadataContainerException(Exception):
"""Raised when the project's medatacontainer is no longer valid"""
pass
class MMRequestException(Exception):
""" """
pass
class MMUIException(Exception):
""" """
pass
class MMUnsupportedOperationException(Exception):
""" """
pass
class SalesforceMoreThanOneRecord(Exception):
'''
Error Code: 300
The value returned when an external ID exists in more than one record. The
response body contains the list of matching records.
'''
pass
class SalesforceMalformedRequest(Exception):
'''
Error Code: 400
The request couldn't be understood, usually because the JSON or XML body contains an error.
'''
pass
class SalesforceExpiredSession(Exception):
'''
Error Code: 401
The session ID or OAuth token used has expired or is invalid. The response
body contains the message and errorCode.
'''
pass
class SalesforceRefusedRequest(Exception):
'''
Error Code: 403
The request has been refused. Verify that the logged-in user has
appropriate permissions.
'''
pass
class SalesforceResourceNotFound(Exception):
'''
Error Code: 404
The requested resource couldn't be found. Check the URI for errors, and
verify that there are no sharing issues.
'''
pass
class SalesforceGeneralError(Exception):
'''
A non-specific Salesforce error.
'''
pass
|
gpl-2.0
|
mpeuster/estate
|
experiments/simple-redis-poc/pox/pox/proto/dhcp_client.py
|
43
|
16184
|
# Copyright 2013 James McCauley
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
DHCP Client stuff
"""
from pox.core import core
log = core.getLogger()
import pox.lib.packet as pkt
from pox.lib.addresses import IPAddr, EthAddr
from pox.lib.util import dpid_to_str, str_to_dpid
from pox.lib.revent import EventMixin, Event
import pox.lib.recoco as recoco
import pox.openflow.libopenflow_01 as of
import time
import random
class DHCPOffer (Event):
"""
Fired when an offer has been received
If you want to immediately accept it, do accept().
If you want to reject it, do reject().
If you want to defer acceptance, do nothing.
"""
def __init__ (self, p):
super(DHCPOffer,self).__init__()
self.offer = p
self.address = p.yiaddr
self.server = p.siaddr
o = p.options.get(p.SERVER_ID_OPT)
if o: self.server = o.addr
o = p.options.get(p.SUBNET_MASK_OPT)
self.subnet_mask = o.addr if o else None
o = p.options.get(p.ROUTERS_OPT)
self.routers = o.addrs if o else []
o = p.options.get(p.DNS_SERVER_OPT)
self.dns_servers = o.addrs if o else []
o = p.options.get(p.REQUEST_LEASE_OPT)
self.lease_time = o.seconds if o is not None else 86400  # lease length (attribute name assumed); one day if unspecified
self._accept = None
def reject (self):
self._accept = False
def accept (self):
self._accept = True
class DHCPOffers (Event):
"""
Fired when all offers in time window have been received
"""
def __init__ (self, offers):
super(DHCPOffers,self).__init__()
self.offers = offers
self.accepted = None
def accept (self, offer):
assert offer in self.offers
self.accepted = offer
class DHCPLeased (Event):
"""
Fired when a lease has been confirmed
"""
def __init__ (self, lease):
super(DHCPLeased,self).__init__()
# Lease is the appropriate offer.
self.lease = lease
class DHCPClientError (Event):
pass
class DHCPClient (EventMixin):
"""
A DHCP client
Currently doesn't do lots of stuff "right" according to RFC 2131 Section 4.4,
and the state/timeout management is pretty bad. It does mostly serve to get
you an address under simple circumstances, though.
Feel free to add improvements!
"""
"""
TODO:
* Bind port_name -> port_no later?
* Renew
* Keep track of lease times
"""
_eventMixin_events = set([DHCPOffer, DHCPOffers, DHCPLeased,
DHCPClientError])
_xid = random.randint(1000,0xffFFffFF)
TOTAL_TIMEOUT = 8
OFFER_TIMEOUT = 2
REQUEST_TIMEOUT = 2
DISCOVER_TIMEOUT = 2
# Client states
INIT = 'INIT'
INIT_REBOOT = 'INIT_REBOOT'
SELECTING = 'SELECTING'
REBOOTING = 'REBOOTING'
REQUESTING = 'REQUESTING'
REBINDING = 'REBINDING'
BOUND = 'BOUND'
RENEWING = 'RENEWING'
# Not real DHCP states
NEW = '<NEW>'
ERROR = '<ERROR>'
IDLE = '<IDLE>'
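# Typical successful flow: NEW -> INIT (send discover) -> SELECTING (collect
# offers) -> REQUESTING (request the accepted offer) -> BOUND.  Timeouts fall
# back to INIT; unrecoverable problems end in ERROR.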
def __init__ (self, dpid, port,
port_eth = None,
auto_accept = False,
install_flows = True,
offer_timeout = None,
request_timeout = None,
total_timeout = None,
discovery_timeout = None,
name = None):
"""
Initializes
port_eth can be True to use the MAC associated with the port by the
switch, None to use the 'dpid MAC', or an EthAddr.
"""
# Accept first offer?
# If True the first non-rejected offer is used immediately, without
# waiting for the offer_timeout window to close.
self.auto_accept = auto_accept
self.install_flows = install_flows
if name is None:
self.log = log
else:
self.log = core.getLogger(name)
if hasattr(dpid, 'dpid'):
dpid = dpid.dpid
self.dpid = dpid
self.port_name = port
self.port_eth = port_eth
self._state = self.NEW
self._start = None
# We keep track of all offers we received
self.offers = []
# XID that messages to us should have
self.offer_xid = None
self.ack_xid = None
# Accepted offer
self.accepted = None
# Requested offer
self.requested = None
# Bound offer
self.bound = None
# How long to wait total
self.total_timeout = total_timeout or self.TOTAL_TIMEOUT
self.total_timer = None
# How long to wait for the first offer following a discover
# If we don't hear one, we'll resend the discovery
self.discover_timeout = discovery_timeout or self.DISCOVER_TIMEOUT
self.discover_timer = None
# How long to wait for offers after the first one
self.offer_timeout = offer_timeout or self.OFFER_TIMEOUT
self.offer_timer = None
# How long to wait for ACK/NAK on requested offer
self.request_timeout = request_timeout or self.REQUEST_TIMEOUT
self.request_timer = None
# We add and remove the PacketIn listener. This is its event ID
self._packet_listener = None
self._try_start()
if self.state != self.INIT:
self._listen_for_connection()
def _handle_ConnectionUp (self, event):
self._try_start()
def _listen_for_connection (self):
core.openflow.addListenerByName('ConnectionUp', self._handle_ConnectionUp,
once = True)
def _try_start (self):
if self.state != self.NEW:
return
dpid = self.dpid
port = self.port_name
con = core.openflow.connections.get(dpid, None)
if con is None:
#raise RuntimeError('DPID %s not connected' % (dpid_to_str(dpid),))
self._listen_for_connection()
return
if isinstance(port, str):
if port not in con.ports:
self.log.error('No such port as %s.%s' % (dpid_to_str(dpid), port))
#raise RuntimeError('No such port as %s.%s' % (dpid_to_str(dpid),port))
self.state = self.ERROR
return
self.portno = con.ports[port].port_no
if self.port_eth is None:
self.port_eth = con.eth_addr
elif self.port_eth is True:
self.port_eth = con.ports[port].hw_addr
self.state = self.INIT
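# NOTE: _total_timeout below is apparently unused here; the total timer is
# wired to _do_total_timeout further down.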
def _total_timeout (self):
# If this goes off and we haven't finished, tell the user we failed
self.log.warn('Did not complete successfully')
self.state = self.ERROR
@property
def _secs (self):
return time.time() - self._start
@property
def state (self):
return self._state
@state.setter
def state (self, state):
old = self._state
self.log.debug("Transition: %s -> %s", old, state)
def killtimer (name):
name += '_timer'
a = getattr(self, name)
if a is not None:
a.cancel()
setattr(self, name, None)
def set_state (s, debug = None, warn = None, info = None):
def state_setter ():
if debug: self.log.debug(debug)
if warn: self.log.warn(warn)
if info: self.log.info(info)
self.state = s
return state_setter
if old == self.INIT:
killtimer('discover')
elif old == self.SELECTING:
killtimer('offer')
elif old == self.REQUESTING:
killtimer('request')
self.requested = None
# Make sure we're seeing packets if needed...
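# get_flow builds a flow-mod matching server->client DHCP traffic (UDP 67 -> 68)
# addressed either to our unicast MAC or to the Ethernet broadcast address.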
def get_flow (broadcast = False):
fm = of.ofp_flow_mod()
if broadcast:
fm.match.dl_dst = pkt.ETHER_BROADCAST
else:
fm.match.dl_dst = self.port_eth
fm.match.in_port = self.portno
fm.match.dl_type = pkt.ethernet.IP_TYPE
fm.match.nw_proto = pkt.ipv4.UDP_PROTOCOL
fm.match.tp_src = pkt.dhcp.SERVER_PORT
fm.match.tp_dst = pkt.dhcp.CLIENT_PORT
fm.priority += 1
return fm
if state not in (self.IDLE, self.ERROR, self.BOUND):
if self._packet_listener is None:
self._packet_listener = core.openflow.addListenerByName('PacketIn',
self._handle_PacketIn)
if self.install_flows:
fm = get_flow(False)
fm.actions.append(of.ofp_action_output(port = of.OFPP_CONTROLLER))
self._con.send(fm)
fm = get_flow(True)
fm.actions.append(of.ofp_action_output(port = of.OFPP_CONTROLLER))
self._con.send(fm)
else:
if self._packet_listener is not None:
core.openflow.removeListener(self._packet_listener)
self._packet_listener = None
if self.install_flows:
fm = get_flow(False)
fm.command = of.OFPFC_DELETE_STRICT
self._con.send(fm)
fm = get_flow(True)
fm.command = of.OFPFC_DELETE_STRICT
self._con.send(fm)
self._state = state
if state == self.INIT:
assert old in (self.NEW,self.INIT)
# We transition INIT->INIT when discovery times out
if old == self.NEW:
# In this case, we want to set a total timeout
killtimer('total')
self.total_timer = recoco.Timer(self.total_timeout,
self._do_total_timeout)
self._start = time.time()
self._discover()
self.discover_timer = recoco.Timer(self.discover_timeout,
set_state(self.INIT))
elif state == self.SELECTING:
assert old == self.INIT
self.offer_timer = recoco.Timer(self.offer_timeout,
self._do_accept)
elif state == self.REQUESTING:
assert old == self.SELECTING
assert self.requested
self._request()
self.request_timer = recoco.Timer(self.request_timeout,
set_state(self.INIT,info='Timeout'))
elif state == self.BOUND:
killtimer('total')
ev = DHCPLeased(self.bound)
self.log.info("Got %s/%s -> %s",
self.bound.address,
self.bound.subnet_mask,
','.join(str(g) for g in self.bound.routers))
self.raiseEventNoErrors(ev)
#TODO: Handle expiring leases
elif state == self.ERROR:
#TODO: Error info
self.raiseEventNoErrors(DHCPClientError())
def _do_total_timeout (self):
self.log.error('Did not successfully bind in time')
self.state = self.ERROR
def _add_param_requests (self, msg):
req = pkt.DHCP.DHCPParameterRequestOption([
pkt.DHCP.DHCPDNSServersOption,
pkt.DHCP.DHCPRoutersOption,
pkt.DHCP.DHCPSubnetMaskOption,
])
msg.add_option(req)
def _discover (self):
self.offers = []
msg = pkt.dhcp()
self._add_param_requests(msg)
self.offer_xid = self._send(msg, msg.DISCOVER_MSG)
def _request (self):
msg = pkt.dhcp()
msg.siaddr = self.requested.server
#self._add_param_requests(msg)
msg.add_option(pkt.DHCP.DHCPServerIdentifierOption(msg.siaddr))
msg.add_option(pkt.DHCP.DHCPRequestIPOption(self.requested.address))
self.request_xid = self._send(msg, msg.REQUEST_MSG)
@classmethod
def _new_xid (cls):
if cls._xid == 0xffffFFFF:
cls._xid = 0
else:
cls._xid += 1
return cls._xid
def _send (self, msg, msg_type):
msg.flags |= msg.BROADCAST_FLAG
msg.htype = 1
msg.hlen = 6
msg.op = msg.BOOTREQUEST
msg.secs = self._secs
msg.xid = self._new_xid()
msg.chaddr = self.port_eth
#if msg.siaddr != pkt.ipv4.IP_ANY:
# msg.add_option(pkt.DHCP.DHCPServerIdentifierOption(self.msg.siaddr))
msg.add_option(pkt.DHCP.DHCPMsgTypeOption(msg_type))
self._send_dhcp(msg)
return msg.xid
def _send_dhcp (self, msg):
ethp = pkt.ethernet(src=self.port_eth, dst=pkt.ETHER_BROADCAST)
ethp.type = pkt.ethernet.IP_TYPE
ipp = pkt.ipv4()
ipp.srcip = pkt.IP_ANY #NOTE: If rebinding, use existing local IP?
ipp.dstip = pkt.IP_BROADCAST
ipp.protocol = ipp.UDP_PROTOCOL
udpp = pkt.udp()
udpp.srcport = pkt.dhcp.CLIENT_PORT
udpp.dstport = pkt.dhcp.SERVER_PORT
udpp.payload = msg
ipp.payload = udpp
ethp.payload = ipp
po = of.ofp_packet_out(data=ethp.pack())
po.actions.append(of.ofp_action_output(port=self.portno))
self._con.send(po)
@property
def _con (self):
return core.openflow.connections[self.dpid]
def _handle_PacketIn (self, event):
if event.dpid != self.dpid: return
if event.port != self.portno: return
# Is it to us? (Or at least not specifically NOT to us...)
ipp = event.parsed.find('ipv4')
if not ipp or not ipp.parsed:
return
if self.bound and self.bound.address == ipp.dstip:
pass # Okay.
elif ipp.dstip not in (pkt.IP_ANY,pkt.IP_BROADCAST):
return
p = event.parsed.find('dhcp')
if p is None:
return
if not isinstance(p.prev, pkt.udp):
return
udpp = p.prev
if udpp.dstport != pkt.dhcp.CLIENT_PORT:
return
if udpp.srcport != pkt.dhcp.SERVER_PORT:
return
if p.op != p.BOOTREPLY:
return
t = p.options.get(p.MSG_TYPE_OPT)
if t is None:
return
if t.type == p.OFFER_MSG:
if p.xid != self.offer_xid:
if self.state in (self.INIT,self.SELECTING):
self.log.info('Received offer with wrong XID')
else:
self.log.debug('Received unexpected offer with wrong XID')
return
if self.state == self.INIT:
# First offer switches states
self.state = self.SELECTING
if self.state != self.SELECTING:
self.log.warn('Received an offer while in state %s', self.state)
return
self._exec_offer(event, p)
elif t.type in (p.ACK_MSG, p.NAK_MSG):
if p.xid != self.request_xid:
if self.state == self.REQUESTING:
self.log.info('Received ACK/NAK with wrong XID')
else:
self.log.debug('Received unexpected ACK/NAK with wrong XID')
return
if self.state != self.REQUESTING:
self.log.warn('Received an ACK/NAK while in state %s', self.state)
return
if t.type == p.NAK_MSG:
self._exec_request_nak(event, p)
else:
self._exec_request_ack(event, p)
def _exec_offer (self, event, p):
o = DHCPOffer(p)
self.offers.append(o)
self.raiseEventNoErrors(o)
if self.auto_accept and (o._accept is not False):
# Good enough!
o._accept = True
self._do_accept()
def _exec_request_ack (self, event, p):
self.bound = self.requested
self.state = self.BOUND
def _exec_request_nak (self, event, p):
self.log.warn('DHCP server NAKed our attempted acceptance of an offer')
# Try again...
self.state = self.INIT
def _do_accept (self):
ev = DHCPOffers(self.offers)
for o in self.offers:
if o._accept is True:
ev.accepted = o
break
if ev.accepted is None:
for o in self.offers:
if o._accept is not False:
ev.accepted = o
break
self.raiseEventNoErrors(ev)
#TODO: Properly decline offers
if ev.accepted is None:
self.log.info('No offer accepted')
self.state = self.IDLE
return
self.requested = ev.accepted
self.state = self.REQUESTING
def launch (dpid, port, port_eth = None, name = None, __INSTANCE__ = None):
"""
Launch
port_eth unspecified: "DPID MAC"
port_eth enabled: Port MAC
port_eth specified: Use that
"""
if port_eth in (True, None):
pass
else:
port_eth = EthAddr(port_eth)
dpid = str_to_dpid(dpid)
try:
port = int(port)
except:
pass
def dhcpclient_init ():
n = name
if n is None:
s = ''
while True:
if not core.hasComponent("DHCPClient" + s):
n = "DHCPClient" + s
break
s = str(int('0' + s) + 1)
else:
if core.hasComponent(n):
self.log.error("Already have component %s", n)
return
client = DHCPClient(port=port, dpid=dpid, name=n, port_eth=port_eth)
core.register(n, client)
core.call_when_ready(dhcpclient_init, ['openflow'])
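# Illustrative invocation (arguments per launch() above; values hypothetical):
#   ./pox.py proto.dhcp_client --dpid=00-00-00-00-00-01 --port=1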
|
apache-2.0
|
openstate/yournextrepresentative
|
candidates/tests/test_constituency_view.py
|
2
|
1634
|
from mock import patch
from django_webtest import WebTest
from .auth import TestUserMixin
from .fake_popit import (FakePersonCollection, FakeOrganizationCollection,
FakePostCollection)
@patch('candidates.popit.PopIt')
class TestConstituencyDetailView(TestUserMixin, WebTest):
def test_any_constituency_page_without_login(self, mock_popit):
mock_popit.return_value.organizations = FakeOrganizationCollection
mock_popit.return_value.persons = FakePersonCollection
mock_popit.return_value.posts = FakePostCollection
# Just a smoke test for the moment:
response = self.app.get('/election/2015/post/65808/dulwich-and-west-norwood')
response.mustcontain('<a href="/person/2009/tessa-jowell" class="candidate-name">Tessa Jowell</a> <span class="party">Labour Party</span>')
# There should be no forms on the page if you're not logged in:
self.assertEqual(0, len(response.forms))
def test_any_constituency_page(self, mock_popit):
mock_popit.return_value.organizations = FakeOrganizationCollection
mock_popit.return_value.persons = FakePersonCollection
mock_popit.return_value.posts = FakePostCollection
# Just a smoke test for the moment:
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user
)
response.mustcontain('<a href="/person/2009/tessa-jowell" class="candidate-name">Tessa Jowell</a> <span class="party">Labour Party</span>')
form = response.forms['new-candidate-form']
self.assertTrue(form)
|
agpl-3.0
|
chand3040/cloud_that
|
lms/djangoapps/courseware/migrations/0010_rename_xblock_field_content_to_user_state_summary.py
|
114
|
11590
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Removing unique constraint on 'XModuleSettingsField', fields ['usage_id', 'field_name']
db.delete_unique('courseware_xmodulesettingsfield', ['usage_id', 'field_name'])
# Deleting model 'XModuleSettingsField'
db.delete_table('courseware_xmodulesettingsfield')
# Move all content currently stored as Scope.content to Scope.user_state_summary
db.rename_table('courseware_xmodulecontentfield', 'courseware_xmoduleuserstatesummaryfield')
db.rename_column('courseware_xmoduleuserstatesummaryfield', 'definition_id', 'usage_id')
def backwards(self, orm):
# Adding model 'XModuleSettingsField'
db.create_table('courseware_xmodulesettingsfield', (
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True, db_index=True)),
('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True, db_index=True)),
('value', self.gf('django.db.models.fields.TextField')(default='null')),
('field_name', self.gf('django.db.models.fields.CharField')(max_length=64, db_index=True)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('usage_id', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
))
db.send_create_signal('courseware', ['XModuleSettingsField'])
# Adding unique constraint on 'XModuleSettingsField', fields ['usage_id', 'field_name']
db.create_unique('courseware_xmodulesettingsfield', ['usage_id', 'field_name'])
db.rename_table('courseware_xmoduleuserstatesummaryfield', 'courseware_xmodulecontentfield')
db.rename_column('courseware_xmodulecontentfield', 'usage_id', 'definition_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'courseware.offlinecomputedgrade': {
'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'OfflineComputedGrade'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'gradeset': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'courseware.offlinecomputedgradelog': {
'Meta': {'ordering': "['-created']", 'object_name': 'OfflineComputedGradeLog'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nstudents': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'seconds': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'courseware.studentmodule': {
'Meta': {'unique_together': "(('student', 'module_state_key', 'course_id'),)", 'object_name': 'StudentModule'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'done': ('django.db.models.fields.CharField', [], {'default': "'na'", 'max_length': '8', 'db_index': 'True'}),
'grade': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_grade': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'module_state_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "'module_id'", 'db_index': 'True'}),
'module_type': ('django.db.models.fields.CharField', [], {'default': "'problem'", 'max_length': '32', 'db_index': 'True'}),
'state': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'student': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'courseware.studentmodulehistory': {
'Meta': {'object_name': 'StudentModuleHistory'},
'created': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'grade': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_grade': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'student_module': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['courseware.StudentModule']"}),
'version': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'courseware.xmodulestudentinfofield': {
'Meta': {'unique_together': "(('student', 'field_name'),)", 'object_name': 'XModuleStudentInfoField'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'student': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'value': ('django.db.models.fields.TextField', [], {'default': "'null'"})
},
'courseware.xmodulestudentprefsfield': {
'Meta': {'unique_together': "(('student', 'module_type', 'field_name'),)", 'object_name': 'XModuleStudentPrefsField'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'module_type': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'student': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'value': ('django.db.models.fields.TextField', [], {'default': "'null'"})
},
'courseware.xmoduleuserstatesummaryfield': {
'Meta': {'unique_together': "(('usage_id', 'field_name'),)", 'object_name': 'XModuleUserStateSummaryField'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'usage_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.TextField', [], {'default': "'null'"})
}
}
complete_apps = ['courseware']
|
agpl-3.0
|
pre-commit/pre-commit-hooks
|
tests/file_contents_sorter_test.py
|
1
|
2582
|
import pytest
from pre_commit_hooks.file_contents_sorter import FAIL
from pre_commit_hooks.file_contents_sorter import main
from pre_commit_hooks.file_contents_sorter import PASS
@pytest.mark.parametrize(
('input_s', 'argv', 'expected_retval', 'output'),
(
(b'', [], FAIL, b'\n'),
(b'lonesome\n', [], PASS, b'lonesome\n'),
(b'missing_newline', [], FAIL, b'missing_newline\n'),
(b'newline\nmissing', [], FAIL, b'missing\nnewline\n'),
(b'missing\nnewline', [], FAIL, b'missing\nnewline\n'),
(b'alpha\nbeta\n', [], PASS, b'alpha\nbeta\n'),
(b'beta\nalpha\n', [], FAIL, b'alpha\nbeta\n'),
(b'C\nc\n', [], PASS, b'C\nc\n'),
(b'c\nC\n', [], FAIL, b'C\nc\n'),
(b'mag ical \n tre vor\n', [], FAIL, b' tre vor\nmag ical \n'),
(b'@\n-\n_\n#\n', [], FAIL, b'#\n-\n@\n_\n'),
(b'extra\n\n\nwhitespace\n', [], FAIL, b'extra\nwhitespace\n'),
(b'whitespace\n\n\nextra\n', [], FAIL, b'extra\nwhitespace\n'),
(
b'fee\nFie\nFoe\nfum\n',
[],
FAIL,
b'Fie\nFoe\nfee\nfum\n',
),
(
b'Fie\nFoe\nfee\nfum\n',
[],
PASS,
b'Fie\nFoe\nfee\nfum\n',
),
(
b'fee\nFie\nFoe\nfum\n',
['--ignore-case'],
PASS,
b'fee\nFie\nFoe\nfum\n',
),
(
b'Fie\nFoe\nfee\nfum\n',
['--ignore-case'],
FAIL,
b'fee\nFie\nFoe\nfum\n',
),
(
b'Fie\nFoe\nfee\nfee\nfum\n',
['--ignore-case'],
FAIL,
b'fee\nfee\nFie\nFoe\nfum\n',
),
(
b'Fie\nFoe\nfee\nfum\n',
['--unique'],
PASS,
b'Fie\nFoe\nfee\nfum\n',
),
(
b'Fie\nFie\nFoe\nfee\nfum\n',
['--unique'],
FAIL,
b'Fie\nFoe\nfee\nfum\n',
),
(
b'fee\nFie\nFoe\nfum\n',
['--unique', '--ignore-case'],
PASS,
b'fee\nFie\nFoe\nfum\n',
),
(
b'fee\nfee\nFie\nFoe\nfum\n',
['--unique', '--ignore-case'],
FAIL,
b'fee\nFie\nFoe\nfum\n',
),
),
)
def test_integration(input_s, argv, expected_retval, output, tmpdir):
path = tmpdir.join('file.txt')
path.write_binary(input_s)
output_retval = main([str(path)] + argv)
assert path.read_binary() == output
assert output_retval == expected_retval
|
mit
|
abalakh/robottelo
|
tests/foreman/cli/test_roles.py
|
1
|
2537
|
# -*- encoding: utf-8 -*-
"""Test for Roles CLI"""
from fauxfactory import gen_string
from robottelo.cli.base import CLIReturnCodeError
from robottelo.cli.factory import make_role
from robottelo.cli.role import Role
from robottelo.decorators import skip_if_bug_open, stubbed
from robottelo.test import CLITestCase
def valid_names():
"""Returns a tuple of valid role names"""
return(
gen_string('alpha', 15),
gen_string('alphanumeric', 15),
gen_string('numeric', 15),
gen_string('latin1', 15),
gen_string('utf8', 15),
)
class TestRole(CLITestCase):
"""Test class for Roles CLI"""
@skip_if_bug_open('bugzilla', 1138553)
def test_positive_create_role_1(self):
"""@Test: Create new roles and assign to the custom user
@Feature: Roles
@Assert: Assert creation of roles
@BZ: 1138553
"""
for name in valid_names():
with self.subTest(name):
role = make_role({'name': name})
self.assertEqual(role['name'], name)
@skip_if_bug_open('bugzilla', 1138559)
@stubbed
def test_create_role_permission_1(self):
"""@test: Create new roles Use different set of permission
@feature: Roles
@assert: Assert creation of roles with set of permission
@status: manual
"""
pass
@skip_if_bug_open('bugzilla', 1138553)
def test_positive_delete_role_1(self):
"""@Test: Delete roles after creating them
@Feature: Roles
@Assert: Assert deletion of roles
"""
for name in valid_names():
with self.subTest(name):
role = make_role({'name': name})
self.assertEqual(role['name'], name)
Role.delete({'id': role['id']})
with self.assertRaises(CLIReturnCodeError):
Role.info({'id': role['id']})
@skip_if_bug_open('bugzilla', 1138553)
def test_positive_update_role_1(self):
"""@Test: Update roles after creating them
@Feature: Roles
@Assert: Assert updating of roles
"""
role = make_role({'name': gen_string('alpha', 15)})
for new_name in valid_names():
with self.subTest(new_name):
Role.update({
'id': role['id'],
'new-name': new_name,
})
role = Role.info({'id': role['id']})
self.assertEqual(role['name'], new_name)
|
gpl-3.0
|
GitHublong/hue
|
desktop/core/ext-py/Pygments-1.3.1/pygments/styles/murphy.py
|
75
|
2751
|
# -*- coding: utf-8 -*-
"""
pygments.styles.murphy
~~~~~~~~~~~~~~~~~~~~~~
Murphy's style from CodeRay.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class MurphyStyle(Style):
"""
Murphy's style from CodeRay.
"""
default_style = ""
styles = {
Whitespace: "#bbbbbb",
Comment: "#666 italic",
Comment.Preproc: "#579 noitalic",
Comment.Special: "#c00 bold",
Keyword: "bold #289",
Keyword.Pseudo: "#08f",
Keyword.Type: "#66f",
Operator: "#333",
Operator.Word: "bold #000",
Name.Builtin: "#072",
Name.Function: "bold #5ed",
Name.Class: "bold #e9e",
Name.Namespace: "bold #0e84b5",
Name.Exception: "bold #F00",
Name.Variable: "#036",
Name.Variable.Instance: "#aaf",
Name.Variable.Class: "#ccf",
Name.Variable.Global: "#f84",
Name.Constant: "bold #5ed",
Name.Label: "bold #970",
Name.Entity: "#800",
Name.Attribute: "#007",
Name.Tag: "#070",
Name.Decorator: "bold #555",
String: "bg:#e0e0ff",
String.Char: "#88F bg:",
String.Doc: "#D42 bg:",
String.Interpol: "bg:#eee",
String.Escape: "bold #666",
String.Regex: "bg:#e0e0ff #000",
String.Symbol: "#fc8 bg:",
String.Other: "#f88",
Number: "bold #60E",
Number.Integer: "bold #66f",
Number.Float: "bold #60E",
Number.Hex: "bold #058",
Number.Oct: "bold #40E",
Generic.Heading: "bold #000080",
Generic.Subheading: "bold #800080",
Generic.Deleted: "#A00000",
Generic.Inserted: "#00A000",
Generic.Error: "#FF0000",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold #c65d09",
Generic.Output: "#888",
Generic.Traceback: "#04D",
Error: "#F00 bg:#FAA"
}
|
apache-2.0
|
karlito40/servo
|
tests/wpt/css-tests/css21_dev/xhtml1/support/fonts/makegsubfonts.py
|
820
|
14309
|
import os
import textwrap
from xml.etree import ElementTree
from fontTools.ttLib import TTFont, newTable
from fontTools.misc.psCharStrings import T2CharString
from fontTools.ttLib.tables.otTables import GSUB,\
ScriptList, ScriptRecord, Script, DefaultLangSys,\
FeatureList, FeatureRecord, Feature,\
LookupList, Lookup, AlternateSubst, SingleSubst
# paths
directory = os.path.dirname(__file__)
shellSourcePath = os.path.join(directory, "gsubtest-shell.ttx")
shellTempPath = os.path.join(directory, "gsubtest-shell.otf")
featureList = os.path.join(directory, "gsubtest-features.txt")
javascriptData = os.path.join(directory, "gsubtest-features.js")
outputPath = os.path.join(os.path.dirname(directory), "gsubtest-lookup%d")
baseCodepoint = 0xe000
# -------
# Features
# -------
f = open(featureList, "rb")
text = f.read()
f.close()
mapping = []
for line in text.splitlines():
line = line.strip()
if not line:
continue
if line.startswith("#"):
continue
# parse
values = line.split("\t")
tag = values.pop(0)
mapping.append(tag)
# --------
# Outlines
# --------
def addGlyphToCFF(glyphName=None, program=None, private=None, globalSubrs=None, charStringsIndex=None, topDict=None, charStrings=None):
charString = T2CharString(program=program, private=private, globalSubrs=globalSubrs)
charStringsIndex.append(charString)
glyphID = len(topDict.charset)
charStrings.charStrings[glyphName] = glyphID
topDict.charset.append(glyphName)
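# makeLookup1 builds gsubtest-lookup1.otf: for every feature tag it adds a .pass
# and a .fail glyph plus a GSUB LookupType 1 (single substitution) that swaps the
# two, so enabling the feature flips which outline a codepoint renders.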
def makeLookup1():
# make a variation of the shell TTX data
f = open(shellSourcePath)
ttxData = f.read()
f.close()
ttxData = ttxData.replace("__familyName__", "gsubtest-lookup1")
tempShellSourcePath = shellSourcePath + ".temp"
f = open(tempShellSourcePath, "wb")
f.write(ttxData)
f.close()
# compile the shell
shell = TTFont(sfntVersion="OTTO")
shell.importXML(tempShellSourcePath)
shell.save(shellTempPath)
os.remove(tempShellSourcePath)
# load the shell
shell = TTFont(shellTempPath)
# grab the PASS and FAIL data
hmtx = shell["hmtx"]
glyphSet = shell.getGlyphSet()
failGlyph = glyphSet["F"]
failGlyph.decompile()
failGlyphProgram = list(failGlyph.program)
failGlyphMetrics = hmtx["F"]
passGlyph = glyphSet["P"]
passGlyph.decompile()
passGlyphProgram = list(passGlyph.program)
passGlyphMetrics = hmtx["P"]
# grab some tables
hmtx = shell["hmtx"]
cmap = shell["cmap"]
# start the glyph order
existingGlyphs = [".notdef", "space", "F", "P"]
glyphOrder = list(existingGlyphs)
# start the CFF
cff = shell["CFF "].cff
globalSubrs = cff.GlobalSubrs
topDict = cff.topDictIndex[0]
topDict.charset = existingGlyphs
private = topDict.Private
charStrings = topDict.CharStrings
charStringsIndex = charStrings.charStringsIndex
features = sorted(mapping)
# build the outline, hmtx and cmap data
cp = baseCodepoint
for index, tag in enumerate(features):
# tag.pass
glyphName = "%s.pass" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=passGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = passGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
cp += 1
# tag.fail
glyphName = "%s.fail" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=failGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = failGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
# bump this up so that the sequence is the same as the lookup 3 font
cp += 3
# set the glyph order
shell.setGlyphOrder(glyphOrder)
# start the GSUB
shell["GSUB"] = newTable("GSUB")
gsub = shell["GSUB"].table = GSUB()
gsub.Version = 1.0
# make a list of all the features we will make
featureCount = len(features)
# set up the script list
scriptList = gsub.ScriptList = ScriptList()
scriptList.ScriptCount = 1
scriptList.ScriptRecord = []
scriptRecord = ScriptRecord()
scriptList.ScriptRecord.append(scriptRecord)
scriptRecord.ScriptTag = "DFLT"
script = scriptRecord.Script = Script()
defaultLangSys = script.DefaultLangSys = DefaultLangSys()
defaultLangSys.FeatureCount = featureCount
defaultLangSys.FeatureIndex = range(defaultLangSys.FeatureCount)
defaultLangSys.ReqFeatureIndex = 65535
defaultLangSys.LookupOrder = None
script.LangSysCount = 0
script.LangSysRecord = []
# set up the feature list
featureList = gsub.FeatureList = FeatureList()
featureList.FeatureCount = featureCount
featureList.FeatureRecord = []
for index, tag in enumerate(features):
# feature record
featureRecord = FeatureRecord()
featureRecord.FeatureTag = tag
feature = featureRecord.Feature = Feature()
featureList.FeatureRecord.append(featureRecord)
# feature
feature.FeatureParams = None
feature.LookupCount = 1
feature.LookupListIndex = [index]
# write the lookups
lookupList = gsub.LookupList = LookupList()
lookupList.LookupCount = featureCount
lookupList.Lookup = []
for tag in features:
# lookup
lookup = Lookup()
lookup.LookupType = 1
lookup.LookupFlag = 0
lookup.SubTableCount = 1
lookup.SubTable = []
lookupList.Lookup.append(lookup)
# subtable
subtable = SingleSubst()
subtable.Format = 2
subtable.LookupType = 1
subtable.mapping = {
"%s.pass" % tag : "%s.fail" % tag,
"%s.fail" % tag : "%s.pass" % tag,
}
lookup.SubTable.append(subtable)
path = outputPath % 1 + ".otf"
if os.path.exists(path):
os.remove(path)
shell.save(path)
# get rid of the shell
if os.path.exists(shellTempPath):
os.remove(shellTempPath)
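# makeLookup3 builds gsubtest-lookup3.otf: each feature tag gets .default/.alt1-3
# glyphs and a GSUB LookupType 3 (alternate substitution); alternate index N swaps
# in the pass outline only for the .altN glyph, matching the table documented in
# makeJavascriptData below.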
def makeLookup3():
# make a variation of the shell TTX data
f = open(shellSourcePath)
ttxData = f.read()
f.close()
ttxData = ttxData.replace("__familyName__", "gsubtest-lookup3")
tempShellSourcePath = shellSourcePath + ".temp"
f = open(tempShellSourcePath, "wb")
f.write(ttxData)
f.close()
# compile the shell
shell = TTFont(sfntVersion="OTTO")
shell.importXML(tempShellSourcePath)
shell.save(shellTempPath)
os.remove(tempShellSourcePath)
# load the shell
shell = TTFont(shellTempPath)
# grab the PASS and FAIL data
hmtx = shell["hmtx"]
glyphSet = shell.getGlyphSet()
failGlyph = glyphSet["F"]
failGlyph.decompile()
failGlyphProgram = list(failGlyph.program)
failGlyphMetrics = hmtx["F"]
passGlyph = glyphSet["P"]
passGlyph.decompile()
passGlyphProgram = list(passGlyph.program)
passGlyphMetrics = hmtx["P"]
# grab some tables
hmtx = shell["hmtx"]
cmap = shell["cmap"]
# start the glyph order
existingGlyphs = [".notdef", "space", "F", "P"]
glyphOrder = list(existingGlyphs)
# start the CFF
cff = shell["CFF "].cff
globalSubrs = cff.GlobalSubrs
topDict = cff.topDictIndex[0]
topDict.charset = existingGlyphs
private = topDict.Private
charStrings = topDict.CharStrings
charStringsIndex = charStrings.charStringsIndex
features = sorted(mapping)
# build the outline, hmtx and cmap data
cp = baseCodepoint
for index, tag in enumerate(features):
# tag.pass
glyphName = "%s.pass" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=passGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = passGlyphMetrics
# tag.fail
glyphName = "%s.fail" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=failGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = failGlyphMetrics
# tag.default
glyphName = "%s.default" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=passGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = passGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
cp += 1
# tag.alt1,2,3
for i in range(1,4):
glyphName = "%s.alt%d" % (tag, i)
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=failGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = failGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
cp += 1
# set the glyph order
shell.setGlyphOrder(glyphOrder)
# start the GSUB
shell["GSUB"] = newTable("GSUB")
gsub = shell["GSUB"].table = GSUB()
gsub.Version = 1.0
# make a list of all the features we will make
featureCount = len(features)
# set up the script list
scriptList = gsub.ScriptList = ScriptList()
scriptList.ScriptCount = 1
scriptList.ScriptRecord = []
scriptRecord = ScriptRecord()
scriptList.ScriptRecord.append(scriptRecord)
scriptRecord.ScriptTag = "DFLT"
script = scriptRecord.Script = Script()
defaultLangSys = script.DefaultLangSys = DefaultLangSys()
defaultLangSys.FeatureCount = featureCount
defaultLangSys.FeatureIndex = range(defaultLangSys.FeatureCount)
defaultLangSys.ReqFeatureIndex = 65535
defaultLangSys.LookupOrder = None
script.LangSysCount = 0
script.LangSysRecord = []
# set up the feature list
featureList = gsub.FeatureList = FeatureList()
featureList.FeatureCount = featureCount
featureList.FeatureRecord = []
for index, tag in enumerate(features):
# feature record
featureRecord = FeatureRecord()
featureRecord.FeatureTag = tag
feature = featureRecord.Feature = Feature()
featureList.FeatureRecord.append(featureRecord)
# feature
feature.FeatureParams = None
feature.LookupCount = 1
feature.LookupListIndex = [index]
# write the lookups
lookupList = gsub.LookupList = LookupList()
lookupList.LookupCount = featureCount
lookupList.Lookup = []
for tag in features:
# lookup
lookup = Lookup()
lookup.LookupType = 3
lookup.LookupFlag = 0
lookup.SubTableCount = 1
lookup.SubTable = []
lookupList.Lookup.append(lookup)
# subtable
subtable = AlternateSubst()
subtable.Format = 1
subtable.LookupType = 3
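        # Note (added): with lookup type 3 (alternate substitution), a feature
        # value of N selects the N-th glyph in the lists below, so only the
        # matching .altN codepoint renders its .pass glyph for featX=N.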
subtable.alternates = {
"%s.default" % tag : ["%s.fail" % tag, "%s.fail" % tag, "%s.fail" % tag],
"%s.alt1" % tag : ["%s.pass" % tag, "%s.fail" % tag, "%s.fail" % tag],
"%s.alt2" % tag : ["%s.fail" % tag, "%s.pass" % tag, "%s.fail" % tag],
"%s.alt3" % tag : ["%s.fail" % tag, "%s.fail" % tag, "%s.pass" % tag]
}
lookup.SubTable.append(subtable)
path = outputPath % 3 + ".otf"
if os.path.exists(path):
os.remove(path)
shell.save(path)
# get rid of the shell
if os.path.exists(shellTempPath):
os.remove(shellTempPath)
def makeJavascriptData():
features = sorted(mapping)
outStr = []
outStr.append("")
outStr.append("/* This file is autogenerated by makegsubfonts.py */")
outStr.append("")
outStr.append("/* ")
outStr.append(" Features defined in gsubtest fonts with associated base")
outStr.append(" codepoints for each feature:")
outStr.append("")
outStr.append(" cp = codepoint for feature featX")
outStr.append("")
outStr.append(" cp default PASS")
outStr.append(" cp featX=1 FAIL")
outStr.append(" cp featX=2 FAIL")
outStr.append("")
outStr.append(" cp+1 default FAIL")
outStr.append(" cp+1 featX=1 PASS")
outStr.append(" cp+1 featX=2 FAIL")
outStr.append("")
outStr.append(" cp+2 default FAIL")
outStr.append(" cp+2 featX=1 FAIL")
outStr.append(" cp+2 featX=2 PASS")
outStr.append("")
outStr.append("*/")
outStr.append("")
outStr.append("var gFeatures = {");
cp = baseCodepoint
taglist = []
for tag in features:
taglist.append("\"%s\": 0x%x" % (tag, cp))
cp += 4
outStr.append(textwrap.fill(", ".join(taglist), initial_indent=" ", subsequent_indent=" "))
outStr.append("};");
outStr.append("");
if os.path.exists(javascriptData):
os.remove(javascriptData)
f = open(javascriptData, "wb")
f.write("\n".join(outStr))
f.close()
# build fonts
print "Making lookup type 1 font..."
makeLookup1()
print "Making lookup type 3 font..."
makeLookup3()
# output javascript data
print "Making javascript data file..."
makeJavascriptData()
|
mpl-2.0
|
leekchan/django_test
|
tests/utils_tests/test_decorators.py
|
63
|
3910
|
from django.http import HttpResponse
from django.template import Template, Context
from django.template.response import TemplateResponse
from django.test import TestCase, RequestFactory
from django.utils.decorators import decorator_from_middleware
class ProcessViewMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
pass
process_view_dec = decorator_from_middleware(ProcessViewMiddleware)
@process_view_dec
def process_view(request):
return HttpResponse()
class ClassProcessView(object):
def __call__(self, request):
return HttpResponse()
class_process_view = process_view_dec(ClassProcessView())
class FullMiddleware(object):
def process_request(self, request):
request.process_request_reached = True
    def process_view(self, request, view_func, view_args, view_kwargs):
request.process_view_reached = True
def process_template_response(self, request, response):
request.process_template_response_reached = True
return response
def process_response(self, request, response):
# This should never receive unrendered content.
request.process_response_content = response.content
request.process_response_reached = True
return response
full_dec = decorator_from_middleware(FullMiddleware)
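# Note: decorator_from_middleware turns a middleware class into a per-view
# decorator, so the middleware's process_* hooks run only around the
# decorated view rather than for every request.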
class DecoratorFromMiddlewareTests(TestCase):
"""
Tests for view decorators created using
``django.utils.decorators.decorator_from_middleware``.
"""
rf = RequestFactory()
def test_process_view_middleware(self):
"""
Test a middleware that implements process_view.
"""
process_view(self.rf.get('/'))
def test_callable_process_view_middleware(self):
"""
Test a middleware that implements process_view, operating on a callable class.
"""
class_process_view(self.rf.get('/'))
def test_full_dec_normal(self):
"""
Test that all methods of middleware are called for normal HttpResponses
"""
@full_dec
def normal_view(request):
t = Template("Hello world")
return HttpResponse(t.render(Context({})))
request = self.rf.get('/')
normal_view(request)
self.assertTrue(getattr(request, 'process_request_reached', False))
self.assertTrue(getattr(request, 'process_view_reached', False))
# process_template_response must not be called for HttpResponse
self.assertFalse(getattr(request, 'process_template_response_reached', False))
self.assertTrue(getattr(request, 'process_response_reached', False))
def test_full_dec_templateresponse(self):
"""
Test that all methods of middleware are called for TemplateResponses in
the right sequence.
"""
@full_dec
def template_response_view(request):
t = Template("Hello world")
return TemplateResponse(request, t, {})
request = self.rf.get('/')
response = template_response_view(request)
self.assertTrue(getattr(request, 'process_request_reached', False))
self.assertTrue(getattr(request, 'process_view_reached', False))
self.assertTrue(getattr(request, 'process_template_response_reached', False))
# response must not be rendered yet.
self.assertFalse(response._is_rendered)
# process_response must not be called until after response is rendered,
# otherwise some decorators like csrf_protect and gzip_page will not
# work correctly. See #16004
self.assertFalse(getattr(request, 'process_response_reached', False))
response.render()
self.assertTrue(getattr(request, 'process_response_reached', False))
# Check that process_response saw the rendered content
self.assertEqual(request.process_response_content, b"Hello world")
|
bsd-3-clause
|
pjdelport/django
|
docs/_ext/applyxrefs.py
|
132
|
1842
|
"""Adds xref targets to the top of files."""
import sys
import os
testing = False
DONT_TOUCH = (
'./index.txt',
)
def target_name(fn):
if fn.endswith('.txt'):
fn = fn[:-4]
return '_' + fn.lstrip('./').replace('/', '-')
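# e.g. target_name('./ref/models.txt') == '_ref-models'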
def process_file(fn, lines):
lines.insert(0, '\n')
lines.insert(0, '.. %s:\n' % target_name(fn))
try:
with open(fn, 'w') as fp:
fp.writelines(lines)
except IOError:
print("Can't open %s for writing. Not touching it." % fn)
def has_target(fn):
try:
with open(fn, 'r') as fp:
lines = fp.readlines()
except IOError:
print("Can't open or read %s. Not touching it." % fn)
return (True, None)
#print fn, len(lines)
if len(lines) < 1:
print("Not touching empty file %s." % fn)
return (True, None)
if lines[0].startswith('.. _'):
return (True, None)
return (False, lines)
def main(argv=None):
if argv is None:
argv = sys.argv
if len(argv) == 1:
        argv.append('.')
files = []
for root in argv[1:]:
for (dirpath, dirnames, filenames) in os.walk(root):
files.extend([(dirpath, f) for f in filenames])
files.sort()
files = [os.path.join(p, fn) for p, fn in files if fn.endswith('.txt')]
#print files
for fn in files:
if fn in DONT_TOUCH:
print("Skipping blacklisted file %s." % fn)
continue
target_found, lines = has_target(fn)
if not target_found:
if testing:
print('%s: %s' % (fn, lines[0]))
else:
print("Adding xref to %s" % fn)
process_file(fn, lines)
else:
print("Skipping %s: already has a xref" % fn)
if __name__ == '__main__':
sys.exit(main())
|
bsd-3-clause
|
beeverycreative/BeePanel
|
BeeConnect/Command.py
|
1
|
32606
|
#!/usr/bin/env python3
"""
* Copyright (c) 2015 BEEVC - Electronic Systems This file is part of BEESOFT
* software: you can redistribute it and/or modify it under the terms of the GNU
* General Public License as published by the Free Software Foundation, either
* version 3 of the License, or (at your option) any later version. BEESOFT is
* distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details. You
* should have received a copy of the GNU General Public License along with
* BEESOFT. If not, see <http://www.gnu.org/licenses/>.
"""
r"""
BeeCommand Class
This class exports some methods with predefined commands to control the BTF
__init__() Initializes current class
isConnected() returns the connection state
startPrinter() Initializes the printer in firmware mode
getStatus() returns the status of the printer
beep() Beeps for 2 seconds
home() Homes axis XYZ
homeXY() Homes axis XY
homeZ() Homes Z axis
move(x,y,z,e) Relative move of axes XYZE at given feedrate
GoToFirstCalibrationPoint() Moves the BTF to the first calibration point
GoToSecondCalibrationPoint() Saves calibration offset and moves to second calibration point
GoToThirdCalibrationPoint() Moves the BTF to the third calibration point
GetNozzleTemperature() Returns current nozzle temperature
SetNozzleTemperature(T) Sets nozzle target setpoint temperature
Load() Performs load filament operation
Unload() Performs unload filament operation
GoToHeatPos() Moves the BTF to its heat coordinates during filament change
GoToRestPos() Moves the BTF to its Rest coordinates
GetBeeCode() Get current filament beecode
SetBeeCode(A) Set current filament beecode
initSD() Initializes SD card
CreateFile(f) Creates SD file
OpenFile(f) Opens file in the SD card
StartTransfer(f,a) prepares the printer to receive a block of messages
startSDPrint() Starts printing selected sd file
cancelSDPrint() Cancels SD print
sendBlock() Sends a block of messages
sendBlockMsg() Sends a message from the block
cleanBuffer() Clears communication buffer
getPrintStatus() Gets print status
"""
__author__ = "BVC Electronic Systems"
__license__ = ""
import usb.core
import usb.util
import sys
import os
import time
import math
import BeeConnect.Connection
class Cmd():
connected = None
beeCon = None
MESSAGE_SIZE = 512
BLOCK_SIZE = 64
    transmissionErrors = 0
oldFw = ''
newFw = ''
"""*************************************************************************
Init Method
*************************************************************************"""
def __init__(self, con):
r"""
Init Method
Initializes this class
receives as argument the BeeConnection object and verifies the
connection status
"""
self.beeCon = con
self.connected = self.beeCon.isConnected()
return
"""*************************************************************************
connected Method
*************************************************************************"""
def isConnected(self):
r"""
isConnected method
        returns the state of the BTF connection:
connected = True
disconnected = False
"""
return self.connected
"""*************************************************************************
Start Printer Method
*************************************************************************"""
def startPrinter(self):
r"""
startPrinter method
Initializes the printer in firmware mode
"""
resp = self.beeCon.sendCmd("M625\n")
if('Bad M-code 625' in resp): #printer in bootloader mode
print("Printer running in Bootloader Mode")
#print("Changing to firmware")
#self.beeCon.write("M630\n")
#self.beeCon.close()
#time.sleep(1)
return "Bootloader"
elif('ok Q' in resp):
print("Printer running in firmware mode")
return "Firmware"
else:
return ""
"""*************************************************************************
getStatus Method
*************************************************************************"""
def getStatus(self):
r"""
getStatus method
returns the current status of the printer
"""
resp = ''
status = ''
done = False
while(not done):
while('s:' not in resp.lower()):
resp += self.beeCon.sendCmd("M625\n")
time.sleep(1)
if('s:3' in resp.lower()):
status = 'Ready'
done = True
elif('s:4' in resp.lower()):
status = 'Moving'
done = True
elif('s:5' in resp.lower()):
status = 'SD_Print'
done = True
elif('s:6' in resp.lower()):
status = 'Transfer'
done = True
elif('s:7' in resp.lower()):
status = 'Pause'
done = True
elif('s:9' in resp.lower()):
status = 'SDown_Wait'
done = True
return status
"""*************************************************************************
beep Method
*************************************************************************"""
def beep(self):
r"""
beep method
performs a beep with 2 seconds duration
"""
self.beeCon.sendCmd("M300 P2000\n")
return
"""*************************************************************************
home Method
*************************************************************************"""
def home(self):
r"""
home method
homes all axis
"""
self.beeCon.sendCmd("G28\n","3")
return
"""*************************************************************************
homeXY Method
*************************************************************************"""
def homeXY(self):
r"""
homeXY method
home axis X and Y
"""
self.beeCon.sendCmd("G28 X0 Y0\n","3")
return
"""*************************************************************************
homeZ Method
*************************************************************************"""
def homeZ(self):
r"""
homeZ method
homes Z axis
"""
self.beeCon.sendCmd("G28 Z0\n","3")
return
"""*************************************************************************
move Method
*************************************************************************"""
def move(self,x=None,y=None,z=None,e=None,f=None, wait = None):
r"""
move method
        performs a relative move of the axes at the given feedrate
arguments:
x - X axis displacement
y - Y axis displacement
z - Z axis displacement
e - E extruder displacement
f - feedrate
"""
resp = self.beeCon.sendCmd("M121\n")
#print(resp)
splits = resp.split(" ")
xSplit = splits[2].split(":")
ySplit = splits[3].split(":")
zSplit = splits[4].split(":")
eSplit = splits[5].split(":")
currentX = float(xSplit[1])
currentY = float(ySplit[1])
currentZ = float(zSplit[1])
currentE = float(eSplit[1])
newX = currentX
newY = currentY
newZ = currentZ
newE = currentE
commandStr = ""
if x is not None:
newX = newX + x
if y is not None:
newY = newY + y
if z is not None:
newZ = newZ + z
if e is not None:
newE = newE + e
if f is not None:
newF = float(f)
commandStr = "G1 X" + str(newX) + " Y" + str(newY) + " Z" + str(newZ) + " E" + str(newE) + "F" + str(newF) + "\n"
else:
commandStr = "G1 X" + str(newX) + " Y" + str(newY) + " Z" + str(newZ) + " E" + str(newE) + "\n"
if(wait is not None):
self.beeCon.sendCmd(commandStr)
else:
self.beeCon.sendCmd(commandStr,"3")
return
"""*************************************************************************
GoToFirstCalibrationPoint Method
*************************************************************************"""
def GoToFirstCalibrationPoint(self):
r"""
GoToFirstCalibrationPoint method
moves the printer to the first calibration point
"""
#go to home
self.beeCon.sendCmd("G28\n","3")
#set feedrate
self.beeCon.sendCmd("G1 F15000\n")
#set acceleration
self.beeCon.sendCmd("M206 X400\n")
#go to first point
self.beeCon.sendCmd("G1 X0 Y67 Z2\n")
#set acceleration
self.beeCon.sendCmd("M206 X1000\n")
return
"""*************************************************************************
GoToSecondCalibrationPoint Method
*************************************************************************"""
def GoToSecondCalibrationPoint(self):
r"""
GoToSecondCalibrationPoint method
Saves calibration offset and moves to second calibration point
"""
#record calibration position
self.beeCon.sendCmd("M603\n")
self.beeCon.sendCmd("M601\n")
#set feedrate
self.beeCon.sendCmd("G1 F5000\n")
#set acceleration
self.beeCon.sendCmd("M206 X400\n")
#go to SECOND point
self.move(0,0,10,0)
#self.beeCon.sendCmd("G1 X-31 Y-65\n","3")
self.beeCon.sendCmd("G1 X-31 Y-65\n")
self.move(0,0,-10,0)
return
"""*************************************************************************
GoToThirdCalibrationPoint Method
*************************************************************************"""
def GoToThirdCalibrationPoint(self):
r"""
GoToThirdCalibrationPoint method
moves the printer to the third calibration point
"""
#set feedrate
self.beeCon.sendCmd("G1 F5000\n")
#set acceleration
self.beeCon.sendCmd("M206 X400\n")
self.move(0,0,10,0)
        #go to THIRD point
#self.beeCon.sendCmd("G1 X35 Y-65\n","3")
self.beeCon.sendCmd("G1 X35 Y-65\n")
self.move(0,0,-10,0)
return
"""*************************************************************************
GetNozzleTemperature Method
*************************************************************************"""
def GetNozzleTemperature(self):
r"""
GetNozzleTemperature method
reads current nozzle temperature
returns:
nozzle temperature
"""
#get Temperature
resp = self.beeCon.sendCmd("M105\n")
#print(resp)
try:
splits = resp.split(" ")
tPos = splits[0].find("T:")
t = float(splits[0][tPos+2:])
return t
except:
pass
return 0
"""*************************************************************************
SetNozzleTemperature Method
*************************************************************************"""
def SetNozzleTemperature(self, t):
r"""
SetNozzleTemperature method
Sets nozzle target temperature
Arguments:
t - nozzle temperature
"""
commandStr = "M104 S" + str(t) + "\n"
#set Temperature
resp = self.beeCon.sendCmd(commandStr)
#print(resp)
return
"""*************************************************************************
Load Method
*************************************************************************"""
def Load(self):
r"""
load method
performs load filament operation
"""
self.beeCon.sendCmd("G92 E\n")
self.beeCon.sendCmd("M300 P500\n")
self.beeCon.sendCmd("M300 S0 P500\n")
self.beeCon.sendCmd("M300 P500\n")
self.beeCon.sendCmd("M300 S0 P500\n")
self.beeCon.sendCmd("M300 P500\n")
self.beeCon.sendCmd("M300 S0 P500\n")
self.beeCon.sendCmd("G1 F300 E100\n","3")
self.beeCon.sendCmd("G92 E\n")
return
"""*************************************************************************
Unload Method
*************************************************************************"""
def Unload(self):
r"""
Unload method
performs unload operation
"""
self.beeCon.sendCmd("G92 E\n")
self.beeCon.sendCmd("M300 P500\n")
self.beeCon.sendCmd("M300 S0 P500\n")
self.beeCon.sendCmd("M300 P500\n")
self.beeCon.sendCmd("M300 S0 P500\n")
self.beeCon.sendCmd("M300 P500\n")
self.beeCon.sendCmd("M300 S0 P500\n")
self.beeCon.sendCmd("G1 F300 E50\n")
self.beeCon.sendCmd("G92 E\n")
self.beeCon.sendCmd("G1 F1000 E-23\n")
self.beeCon.sendCmd("G1 F800 E2\n")
self.beeCon.sendCmd("G1 F2000 E-23\n")
self.beeCon.sendCmd("G1 F200 E-50\n","3")
self.beeCon.sendCmd("G92 E\n")
return
"""*************************************************************************
GoToHeatPos Method
*************************************************************************"""
def GoToHeatPos(self):
r"""
GoToHeatPos method
moves the printer to the heating coordinates
"""
#set feedrate
self.beeCon.sendCmd("G1 F15000\n")
#set acceleration
self.beeCon.sendCmd("M206 X400\n")
        #go to heat position
self.beeCon.sendCmd("G1 X30 Y0 Z10\n")
#set acceleration
self.beeCon.sendCmd("M206 X1000\n","3")
return
"""*************************************************************************
GoToRestPos Method
*************************************************************************"""
def GoToRestPos(self):
r"""
GoToRestPos method
moves the printer to the rest position
"""
#set feedrate
self.beeCon.sendCmd("G1 F15000\n")
#set acceleration
self.beeCon.sendCmd("M206 X400\n")
        #go to rest position
self.beeCon.sendCmd("G1 X-50 Y0 Z110\n")
#set acceleration
self.beeCon.sendCmd("M206 X1000\n","3")
return
"""*************************************************************************
GetBeeCode Method
*************************************************************************"""
def GetBeeCode(self):
r"""
GetBeeCode method
reads current filament BeeCode
returns:
Filament BeeCode
"""
#Get BeeCode
resp = self.beeCon.sendCmd("M400\n")
splits = resp.split(" ")
code = ""
for s in splits:
cPos = s.find("bcode")
if(cPos >= 0):
code = s[cPos+6:]
return code
"""*************************************************************************
SetBeeCode Method
*************************************************************************"""
def SetBeeCode(self, code):
r"""
SetBeeCode method
Sets filament beecode
arguments:
code - filament code
"""
commandStr = "M400 " + code + "\n"
#Set BeeCode
self.beeCon.sendCmd(commandStr)
return
"""*************************************************************************
initSD Method
*************************************************************************"""
def initSD(self):
r"""
initSD method
        initializes the SD card
"""
#Init SD
self.beeCon.write("M21\n")
tries = 10
resp = ""
while((tries > 0) and ("ok" not in resp.lower())):
try:
resp += self.beeCon.read()
tries -= 1
except:
pass
return tries
"""*************************************************************************
getFileList Method
*************************************************************************"""
def getFileList(self):
fList = {}
fList['FileNames'] = []
fList['FilePaths'] = []
self.initSD()
resp = ""
self.beeCon.write("M20\n")
while("end file list" not in resp.lower()):
resp += self.beeCon.read()
lines = resp.split('\n')
for l in lines:
if("/" in l):
if("firmware.bck" in l.lower()):
pass
elif("firmware.bin" in l.lower()):
pass
elif("config.txt" in l.lower()):
pass
elif("config.bck" in l.lower()):
pass
elif(l == ""):
pass
else:
fName = l[1:len(l)-1]
fList['FileNames'].append(fName)
fList['FilePaths'].append('')
elif("end file list" in l.lower()):
return fList
return fList
"""*************************************************************************
    CreateFile Method
*************************************************************************"""
    def CreateFile(self, fileName):
r"""
        CreateFile method
Creates a file in the SD card root directory
arguments:
fileName - file name
"""
#Init SD
self.initSD()
fn = fileName
if(len(fileName) > 8):
fn = fileName[:8]
cmdStr = "M30 " + fn + "\n"
resp = self.beeCon.sendCmd(cmdStr)
tries = 10
while(tries > 0):
if("file created" in resp.lower()):
print(" :"" SD file created")
break
elif("error" in resp.lower()):
print(" : Error creating file")
return False
else:
resp = self.beeCon.sendCmd("\n")
#print(resp,"...")
tries -= 1
if(tries <= 0):
return False
return True
"""*************************************************************************
OpenFile Method
*************************************************************************"""
def OpenFile(self, fileName):
r"""
OpenFile method
opens file in the sd card root dir
arguments:
fileName - file name
"""
#Init SD
self.initSD()
cmdStr = "M23 " + fileName + "\n"
#Open File
resp = self.beeCon.sendCmd(cmdStr)
tries = 10
while(tries > 0):
if("file opened" in resp.lower()):
print(" :"" SD file opened")
break
else:
resp = self.beeCon.sendCmd("\n")
tries -= 1
if(tries <= 0):
return False
return True
"""*************************************************************************
StartTransfer Method
*************************************************************************"""
def StartTransfer(self, fSize, a):
r"""
StartTransfer method
prepares the printer to receive a block of messages
arguments:
            fSize - bytes to be written
a - initial write position
"""
cmdStr = "M28 D" + str(fSize - 1) + " A" + str(a) + "\n"
#waitStr = "will write " + str(fSize) + " bytes ok"
resp = self.beeCon.sendCmd(cmdStr)
tries = 10
while((tries > 0) and ("ok" not in resp.lower())):
resp += self.beeCon.sendCmd("dummy")
tries -= 1
#print(" :",resp)
if(tries <= 0):
return False
return True
"""*************************************************************************
startSDPrint Method
*************************************************************************"""
def startSDPrint(self, header=False, temp=None):
r"""
startSDPrint method
starts printing selected file
"""
cmd = 'M33'
if(header):
cmd += ' S1'
if(temp is not None):
cmd += ' T' + str(temp)
cmd += '\n'
self.beeCon.sendCmd(cmd)
return True
"""*************************************************************************
cancelSDPrint Method
*************************************************************************"""
def cancelSDPrint(self):
r"""
cancelSDPrint method
        cancels the current print and homes the printer axes
"""
print('Cancelling print')
self.beeCon.write("M112\n",100)
print(self.beeCon.read(100))
self.beeCon.write("G28 Z \n",100)
self.beeCon.read(100)
self.beeCon.write("G28\n",100)
print(self.beeCon.read(100))
print(self.beeCon.read())
#self.beeCon.read()
#self.homeZ()
#self.homeXY()
return True
"""*************************************************************************
sendBlock Method
*************************************************************************"""
def sendBlock(self,startPos, fileObj):
r"""
sendBlock method
writes a block of messages
arguments:
startPos - starting position of block
fileObj - file object with file to write
returns:
            True if block transferred successfully
False if an error occurred and communication was reestablished
None if an error occurred and could not reestablish communication with printer
"""
fileObj.seek(startPos)
block2write = fileObj.read(self.MESSAGE_SIZE*self.BLOCK_SIZE)
endPos = startPos + len(block2write)
#self.StartTransfer(endPos,startPos)
self.beeCon.write("M28 D" + str(endPos - 1) + " A" + str(startPos) + "\n")
nMsg = math.ceil(len(block2write)/self.MESSAGE_SIZE)
msgBuf = []
for i in range(nMsg):
if(i < nMsg):
msgBuf.append(block2write[i*self.MESSAGE_SIZE:(i+1)*self.MESSAGE_SIZE])
else:
msgBuf.append(block2write[i*self.MESSAGE_SIZE:])
resp = self.beeCon.read()
while("ok q:0" not in resp.lower()):
resp += self.beeCon.read()
#print(resp)
#resp = self.beeCon.read(10) #force clear buffer
for m in msgBuf:
mResp = self.sendBlockMsg(m)
if(mResp is not True):
return mResp
return True
"""*************************************************************************
sendBlockMsg Method
*************************************************************************"""
def sendBlockMsg(self,msg):
r"""
sendBlockMsg method
sends a block message to the printer.
arguments:
            msg - message to be written
returns:
            True if message transferred successfully
False if an error occurred and communication was reestablished
None if an error occurred and could not reestablish communication with printer
"""
#resp = self.beeCon.dispatch(msg)
msgLen = len(msg)
        bWritten = self.beeCon.write(msg)
        if(msgLen != bWritten):
print("Bytes lost")
return False
time.sleep(0.001)
tries = 10
resp = ""
while((tries > 0) and ("tog" not in resp)):
try:
resp += self.beeCon.read()
tries -= 1
except Exception:
tries = -1
if(tries > 0):
return True
else:
cleaningTries = 5
clean = False
            self.transmissionErrors += 1
while(cleaningTries > 0 and clean == False):
clean = self.cleanBuffer()
time.sleep(0.5)
self.beeCon.close()
self.beeCon = None
self.beeCon = BeeConnect.Connection.Con()
cleaningTries -= 1
if(cleaningTries <= 0):
return None
if(clean == False):
return None
return False
"""*************************************************************************
cleanBuffer Method
*************************************************************************"""
def cleanBuffer(self):
r"""
cleanBuffer method
cleans communication buffer with printer
"""
cleanStr = "M625" + "a"*(self.MESSAGE_SIZE-5) + "\n"
self.beeCon.write(cleanStr,50)
tries = self.BLOCK_SIZE + 1
resp = self.beeCon.read(50)
acc_resp = ""
#resp = ""
while("ok" not in acc_resp.lower() and tries > 0):
print("Cleaning")
try:
self.beeCon.write(cleanStr,25)
self.beeCon.write("",25)
resp = self.beeCon.read(25)
acc_resp += resp
print(resp)
tries -= 1
except Exception:
print("Read timeout")
tries = 0
print(resp)
return tries
"""*************************************************************************
getPrintStatus Method
*************************************************************************"""
def getPrintStatus(self):
printStatus = {}
self.beeCon.write('M32\n')
resp = ""
while('ok' not in resp):
resp += self.beeCon.read()
split = resp.split(' ')
for s in split:
if('A' in s):
printStatus['Estimated Time'] = int(s[1:])
elif('B' in s):
printStatus['Elapsed Time'] = int(s[1:])//(60*1000)
elif('C' in s):
printStatus['Lines'] = int(s[1:])
elif('D' in s):
printStatus['Executed Lines'] = int(s[1:])
return printStatus
"""*************************************************************************
SetBlowerSpeed Method
*************************************************************************"""
def SetBlowerSpeed(self, speed):
cmd = 'M106 S' + str(speed) + '\n'
self.beeCon.sendCmd(cmd)
return
"""*************************************************************************
FlashFirmware Method
*************************************************************************"""
def FlashFirmware(self, fileName):
if(os.path.isfile(fileName) == False):
print(" :","File does not exist")
return
print(" :","Flashing new firmware File: ",fileName)
self.oldFw = self.GetFirmwareVersion()
self.beeCon.sendCmd('M114 A0.0.0\n', 'ok')
cTime = time.time()
message = "M650 A{0}\n".format(os.path.getsize(fileName))
self.beeCon.write(message)
resp = ''
while('ok' not in resp):
resp += self.beeCon.read()
resp = ''
with open(fileName, 'rb') as f:
while True:
buf = f.read(64)
if not buf: break
self.beeCon.write(buf)
ret = []
while (len(ret) != len(buf)):
try:
ret += self.beeCon.ep_in.read(len(buf), 1000)
except usb.core.USBError as e:
if ("timed out" in str(e.args)):
pass
bRet = bytes(ret)
                if(bRet != buf):
print('TODO: MANAGE FIRMWARE FLASHING FAILURE')
return
sys.stdout.write('.')
sys.stdout.flush()
eTime = time.time()
avgSpeed = os.path.getsize(fileName)//(eTime - cTime)
print ("\n :","Flashing completed in", eTime-cTime, 's')
print(" :Average Transfer Speed",avgSpeed)
self.beeCon.sendCmd('M114 A20.0.0\n', 'ok')
self.newFw=self.GetFirmwareVersion()
return
"""*************************************************************************
GetFirmwareVersion Method
*************************************************************************"""
def GetFirmwareVersion(self):
resp = self.beeCon.sendCmd('M115\n','ok')
resp = resp.replace(' ', '')
split = resp.split('ok')
fw = split[0]
return fw
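# ---------------------------------------------------------------------------
# Usage sketch (added; illustrative only). Assumes a BeeConnect.Connection.Con
# object can be created with no arguments, as done in sendBlockMsg() above.
if __name__ == "__main__":
    con = BeeConnect.Connection.Con()
    cmd = Cmd(con)
    if cmd.isConnected():
        print(cmd.startPrinter())
        print(cmd.getStatus())
        cmd.beep()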
|
gpl-2.0
|
weblabdeusto/weblabdeusto
|
server/launch/sample_balanced2_concurrent_experiments/main_machine/lab_and_experiment1/experiment33/server_config.py
|
968
|
1526
|
#!/usr/bin/env python
#-*-*- encoding: utf-8 -*-*-
weblab_xilinx_experiment_xilinx_device = 'FPGA'
weblab_xilinx_experiment_port_number = 1
# This should be something like this:
# import os as _os
# xilinx_home = _os.getenv('XILINX_HOME')
# if xilinx_home == None:
# if _os.name == 'nt':
# xilinx_home = r'C:\Program Files\Xilinx'
# elif _os.name == 'posix':
# xilinx_home = r"/home/nctrun/Xilinx"
#
# if _os.name == 'nt':
# xilinx_impact_full_path = [xilinx_home + r'\bin\nt\impact']
# elif _os.name == 'posix':
# xilinx_impact_full_path = [xilinx_home + r'/bin/lin/impact']
# But for testing we are going to fake it:
xilinx_home = "."
xilinx_impact_full_path = ["python","./tests/unit/weblab/experiment/devices/xilinx_impact/fake_impact.py" ]
xilinx_device_to_program = 'XilinxImpact' # 'JTagBlazer', 'DigilentAdept'
xilinx_device_to_send_commands = 'SerialPort' # 'HttpDevice'
digilent_adept_full_path = ["python","./test/unit/weblab/experiment/devices/digilent_adept/fake_digilent_adept.py" ]
digilent_adept_batch_content = """something with the variable $FILE"""
xilinx_http_device_ip_FPGA = "192.168.50.138"
xilinx_http_device_port_FPGA = 80
xilinx_http_device_app_FPGA = ""
xilinx_batch_content_FPGA = """setMode -bs
setCable -port auto
addDevice -position 1 -file $FILE
Program -p 1
exit
"""
# Though it is not really a FPGA, the webcam url var name depends on the device,
# specified above.
fpga_webcam_url = '''https://www.weblab.deusto.es/webcam/fpga0/image.jpg'''
|
bsd-2-clause
|
OpenTechFund/WebApp
|
opentech/apply/activity/migrations/0005_event.py
|
1
|
1378
|
# Generated by Django 2.0.2 on 2018-07-30 11:03
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('activity', '0004_update_on_delete_django2'),
]
operations = [
migrations.CreateModel(
name='Event',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('when', models.DateTimeField(auto_now_add=True)),
('type', models.CharField(choices=[('UPDATE_LEAD', 'Update Lead'), ('EDIT', 'Edit'), ('NEW_SUBMISSION', 'New Submission'), ('TRANSITION', 'Transition'), ('DETERMINATION_OUTCOME', 'Determination Outcome'), ('INVITED_TO_PROPOSAL', 'Invited To Proposal'), ('REVIEWERS_UPDATED', 'Reviewers Updated'), ('READY_FOR_REVIEW', 'Ready For Review'), ('NEW_REVIEW', 'New Review'), ('COMMENT', 'Comment'), ('PROPOSAL_SUBMITTED', 'Proposal Submitted'), ('OPENED_SEALED', 'Opened Sealed Submission')], max_length=50)),
('by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)),
('submission', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='funds.ApplicationSubmission', related_name='+')),
],
),
]
|
gpl-2.0
|
herrersystem/grsearch
|
grsearch/tfidf.py
|
1
|
1423
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import math
from grsearch.gsearch import *
def nbr_word(text, is_file):
compt=0
if is_file:
with open(text, 'rb') as f:
filestream=True
while filestream:
line=f.readline()
if len(line) == 0:
break
line=str(line)[2:-1] #convert bytes -> str
for w in line.split(' '):
compt+=1
else:
for w in text.split(' '):
compt+=1
return compt
def calcul_tfidf(keyword, corpus, is_file=False):
global_result,result_tfidf=[],[]
nbr_occur=[0]*len(keyword)
nbr_text=len(corpus)
#Search keyword in all texts.
for text in corpus:
if is_file:
result=search_infile(text, keyword, exactly=True)
else:
result=search(text, keyword, exactly=True)
global_result.append([result, nbr_word(text, is_file)])
    #Count the number of texts containing each keyword.
    for x in global_result:
        for indice, y in enumerate(x[0]):
            if y[1] > 0:
                nbr_occur[indice]+=1
for i,rtext in enumerate(global_result):
buff_tfidf=[]
        #Calculate tf-idf for each keyword.
for j,rkeyword in enumerate(rtext[0]):
#Prevent 0 division.
if nbr_occur[j] == 0:
tf=0.0
idf=0.0
else:
tf=rkeyword[1]/rtext[1]
idf=math.log10(nbr_text/nbr_occur[j])
buff_tfidf.append([rkeyword[0], tf*idf])
result_tfidf.append(list(buff_tfidf))
return result_tfidf
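# ---------------------------------------------------------------------------
# Minimal sketch (added; illustrative only). Assumes grsearch.gsearch.search()
# returns one [keyword, occurrence_count] pair per keyword, as the indexing
# in calcul_tfidf() above implies.
if __name__ == "__main__":
    corpus = ["the cat sat on the mat", "the dog barked", "cat and dog"]
    for scores in calcul_tfidf(["cat", "dog"], corpus):
        print(scores)  # one [keyword, tf*idf] pair per keyword, per text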
|
mit
|
cristobaltapia/sajou
|
sajou/elements/beam2d.py
|
1
|
11163
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Defines a 2-dimensional Bernoulli beam element
"""
import numpy as np
import scipy.sparse as sparse
from numpy import cumsum
from sajou import loads
from sajou.elements.element import Element
from sajou.utils import Local_Csys_two_points
class Beam2D(Element):
"""
Two-dimensional Bernoulli Beam element.
This beam element connects two nodes and is based on the Bernoulli beam theory.
Optionally the rotations on the end nodes can be released to create hinges.
Parameters
----------
node1: Node instance
first node
node2: Node instance
second node
number: int
number of the element
Attributes
----------
node1: Node instance
first node
node2: Node instance
second node
efs: dict
Element freedom signature. Defines the degree of freedom active (dof)
in each node.
nefmt: dict
Node freedom map table of the element.
n_active_dof: int
number of active *dof*
release_end_1: bool
Release rotation on the node 1 of the beam
release_end_2: bool
Release rotation on the node 2 of the beam
transformation_matrix: ndarray
transformation matrix for the element, from local to global
_k_size: int
size of the stiffness matrix
_nodal_connectivity: dict
defines the order of the nodes in the element
_length: float
length of the element
_localCSys: Csys
local coordinate system of the element
_Ke: ndarray
element stiffness matrix (local coordinates)
_load_vector_e: ndarray
Load vector (global coordinates)
_poly_sec_force: ndarray
Vector to calculate section forces
_beam_section: BeamSection
Beam section
_loads: list[Load]
Loads applied to the frame element
Todo
----
    Fix problem with release ends and distributed loads
"""
def __init__(self, node1, node2, number):
        # Instantiate the Element parent class
Element.__init__(self, number)
self._kind = 'Beam2D'
# TODO: accept tuples with coordinates also
self._node1 = node1
self._node2 = node2
# Element nodal connectivity:
self._nodal_connectivity = {0: node1, 1: node2}
# Element Freedom Signature:
self.efs = {
0: np.array([1, 2, 3], dtype=np.int),
1: np.array([1, 2, 3], dtype=np.int)
}
# Node freedom map table of the element (will be automatically
        # calculated when the element stiffness matrix is assembled)
self.nefmt = None
# Total number of active DOFs in the element (will be updated
# when the element stiffness matrix is assembled)
self.n_active_dof = 6
# length of the stiffness matrix
self._k_size = 6
# Make the corresponding nodes aware that they belong to this
# element instance
node1.append_element(self, 0)
node2.append_element(self, 1)
# Calculate the length of the element
self._length = np.linalg.norm(node2 - node1)
# Local coordinate system
self._localCSys = Local_Csys_two_points(
point1=node1, point2=node2, type='cartesian')
        # Direction cosines
delta = node2 - node1
cx = delta[0] / self._length
cy = delta[1] / self._length
cz = delta[2] / self._length
# Transformation matrix
self.transformation_matrix = self._localCSys.calc_transformation_matrix(
self._length, cx, cy, cz)
# Stiffness matrix (global coordinates)
self._Ke = None
# Release rotation on the ends of the beam
self.release_end_1 = False # first node
self.release_end_2 = False # second node
# Beam section
self._beam_section = None
# Loads applied to the frame element
self._loads = []
# Initialize the loading vector
self._load_vector_e = np.zeros(self.n_active_dof)
# Vector to calculate section forces
# (The size of this vector depends on the order of the polynom
# describing the section forces [...and deflections TODO])
self._poly_sec_force = np.zeros((4, 3))
def assemble_Ke(self, second_order=False):
"""
Assemble the element stiffness matrix 'Ke' in local and global coordinates.
Parameters
----------
second_order: bool
Returns
-------
array: global stiffness matrix of the element
"""
# Generate the element stiffness matrix
k = self._assemble_Ke()
# Generate sparse matrix
ke_local = sparse.csr_matrix(k)
# Store as property of the object
self._Ke_local = ke_local
# Transform to global coordinates:
Te = self.transformation_matrix
Ke = Te.T @ ke_local @ Te
self._Ke = Ke
# Generate the Element Node Freedom Map Table
self._generate_element_node_freedom_map_dict()
# Calculate total number of active DOFs in the element
sum_dof = np.array([np.sum((nfs >= 1)) for node_i, nfs in self.efs.items()])
self.n_active_dof = np.sum(sum_dof)
return Ke
def distributed_load(self, **kwargs):
"""Assign a DistributedLoad object to the frame current element.
The parameters are the same as defined for the class DistributedLoad()
Parameters
----------
p1: float
value of the distributed load at the start node
p2: float
value of the distributed load at the end node
direction: str
direction of load application. Can be 'x' or 'y'
coord_system:
coordinate system to which the *direction* parameter applies. It can
be 'local' or 'global'
Returns
-------
a DistributedLoad instance:
"""
dist_load = loads.DistributedLoad(elem=self, **kwargs)
# Add this DistributedLoad instance to the list of loads of the
# element
self._loads.append(dist_load)
# Append the load vector (in global coordinates)
self._load_vector_e += dist_load._load_vector_global
self._poly_sec_force += dist_load._poly_sec_force
return 1
def distributed_moment(self, **kwargs):
"""Assign a DistributedLoad object to the frame current element.
Parameters
----------
m1: float
moment applied at the start end of the beam
m2: float
moment applied at the end of the beam
direction: str
direction of application of the moment. Only 'z' is allowed in Beam2D
coord_system: str
coordinate system to which the 'direction' parameter applies
Returns
-------
returns: TODO
Note
----
The parameters are the same as defined for the class DistributedMoment()
"""
dist_moment = loads.DistributedMoment(elem=self, **kwargs)
# Add this DistributedLoad instance to the list of loads of the
# element
self._loads.append(dist_moment)
# Append the load vector (in global coordinates)
self._load_vector_e += dist_moment._load_vector_global
self._poly_sec_force += dist_moment._poly_sec_force
return 1
def release_end(self, which):
"""
Release the rotation DOF of one or both ends of the beam element.
Parameters
----------
which: int, str
specifies the end that should be released. It can be '1', '2' or 'both'
Returns
-------
TODO: bool
"""
# Set the respective property to 'True'
if which == 1:
self.release_end_1 = True
n_new_dof = self._nodal_connectivity[0]._add_dof(self, 1)
self.efs[0][2] = n_new_dof
elif which == 2:
self.release_end_2 = True
n_new_dof = self._nodal_connectivity[1]._add_dof(self, 1)
self.efs[1][2] = n_new_dof
elif which == 'both':
self.release_end_1 = True
self.release_end_2 = True
n_new_dof_1 = self._nodal_connectivity[0]._add_dof(self, 1)
n_new_dof_2 = self._nodal_connectivity[1]._add_dof(self, 1)
self.efs[0][2] = n_new_dof_1
self.efs[1][2] = n_new_dof_2
return 1
def assign_section(self, beam_section):
"""Assign a beam section instance to the beam
Parameters
----------
beam_section: BeamSection instance
section to be assigned
Returns
-------
self: the same Beam2D instance
"""
self._beam_section = beam_section
return self
def _assemble_Ke(self):
"""
Assemble the element stiffness matrix in local coordinates for the Bernoulli beam.
Returns
-------
array: the element stiffness matrix
"""
# Modulus of elasticity
E = self._beam_section._material._data[0]
# Area of the section
EA = self._beam_section._area * E
# Inertias
EI = self._beam_section._Iz * E
# Length of the element
L = self._length
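        # The entries below form the standard 6x6 local stiffness matrix of a
        # 2D Bernoulli beam with DOF order [u1, v1, theta1, u2, v2, theta2]:
        # axial terms +-EA/L; bending terms 12EI/L^3, +-6EI/L^2, 4EI/L, 2EI/L.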
# Initialize stiffness matrix
ke = np.zeros((6, 6), dtype=np.float64)
ke[0, 0] = ke[3, 3] = EA / L
ke[1, 1] = ke[4, 4] = 12. * EI / (L * L * L)
ke[2, 2] = ke[5, 5] = 4. * EI / L
ke[2, 1] = ke[1, 2] = 6. * EI / L**2
ke[3, 0] = ke[0, 3] = -EA / L
ke[4, 1] = ke[1, 4] = -12. * EI / (L * L * L)
ke[4, 2] = ke[2, 4] = -6. * EI / L**2
ke[5, 1] = ke[1, 5] = 6. * EI / L**2
ke[5, 2] = ke[2, 5] = 2. * EI / L
ke[5, 4] = ke[4, 5] = -6. * EI / L**2
return ke
def _generate_element_node_freedom_map_dict(self):
"""
Generate the Node Freedom Map Table of the element.
The Node Freedom Map Table of the element is a dictionary that contains
the index to which each node's first active DOF contributes to within
the element.
It is analogous to the function __generate_node_freedom_map_dict__() from the
Model class.
Returns
-------
array: node freedom map table
"""
# Obtain the number of active DOFs in each node:
# -
# Not sure about this. It seems that this is not really
# necessary, since the total number of DOF and the size of the
# element stiffness matrix is not changed. Thus, the node
# freedom map table remains constant, even when some DOFs are
# not used (e.g. release ends)
# -
#n_active_dof = [sum(nfs) for n_node, nfs in self.efs.items()]
# Obtain the cumulative sum
#enfmt = cumsum(n_active_dof, dtype=np.int) - n_active_dof[0]
# TODO: make this a dictionary
enfmt = np.array([0, 3], dtype=np.int)
        self.enfmt = enfmt
        self.nefmt = enfmt  # documented attribute name (see class docstring)
return enfmt
|
mit
|
shaded-enmity/ansible-modules-extras
|
cloud/amazon/ec2_eni.py
|
37
|
14246
|
#!/usr/bin/python
#
# This is a free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This Ansible library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_eni
short_description: Create and optionally attach an Elastic Network Interface (ENI) to an instance
description:
- Create and optionally attach an Elastic Network Interface (ENI) to an instance. If an ENI ID is provided, an attempt is made to update the existing ENI. By passing 'None' as the instance_id, an ENI can be detached from an instance.
version_added: "2.0"
author: Rob White, wimnat [at] gmail.com, @wimnat
options:
eni_id:
description:
- The ID of the ENI
required: false
default: null
instance_id:
description:
- Instance ID that you wish to attach ENI to. To detach an ENI from an instance, use 'None'.
required: false
default: null
private_ip_address:
description:
- Private IP address.
required: false
default: null
subnet_id:
description:
- ID of subnet in which to create the ENI. Only required when state=present.
required: true
description:
description:
- Optional description of the ENI.
required: false
default: null
security_groups:
description:
- List of security groups associated with the interface. Only used when state=present.
required: false
default: null
state:
description:
- Create or delete ENI.
required: false
default: present
choices: [ 'present', 'absent' ]
device_index:
description:
- The index of the device for the network interface attachment on the instance.
required: false
default: 0
force_detach:
description:
- Force detachment of the interface. This applies either when explicitly detaching the interface by setting instance_id to None or when deleting an interface with state=absent.
required: false
default: no
delete_on_termination:
description:
- Delete the interface when the instance it is attached to is terminated. You can only specify this flag when the interface is being modified, not on creation.
required: false
source_dest_check:
description:
- By default, interfaces perform source/destination checks. NAT instances however need this check to be disabled. You can only specify this flag when the interface is being modified, not on creation.
required: false
extends_documentation_fragment: aws
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Create an ENI. As no security group is defined, ENI will be created in default security group
- ec2_eni:
private_ip_address: 172.31.0.20
subnet_id: subnet-xxxxxxxx
state: present
# Create an ENI and attach it to an instance
- ec2_eni:
instance_id: i-xxxxxxx
device_index: 1
private_ip_address: 172.31.0.20
subnet_id: subnet-xxxxxxxx
state: present
# Destroy an ENI, detaching it from any instance if necessary
- ec2_eni:
eni_id: eni-xxxxxxx
force_detach: yes
state: absent
# Update an ENI
- ec2_eni:
eni_id: eni-xxxxxxx
description: "My new description"
state: present
# Detach an ENI from an instance
- ec2_eni:
eni_id: eni-xxxxxxx
instance_id: None
state: present
### Delete an interface on termination
# First create the interface
- ec2_eni:
instance_id: i-xxxxxxx
device_index: 1
private_ip_address: 172.31.0.20
subnet_id: subnet-xxxxxxxx
state: present
register: eni
# Modify the interface to enable the delete_on_termination flag
- ec2_eni:
eni_id: {{ "eni.interface.id" }}
delete_on_termination: true
'''
import time
import xml.etree.ElementTree as ET
import re
try:
import boto.ec2
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def get_error_message(xml_string):
root = ET.fromstring(xml_string)
for message in root.findall('.//Message'):
return message.text
def get_eni_info(interface):
interface_info = {'id': interface.id,
'subnet_id': interface.subnet_id,
'vpc_id': interface.vpc_id,
'description': interface.description,
'owner_id': interface.owner_id,
'status': interface.status,
'mac_address': interface.mac_address,
'private_ip_address': interface.private_ip_address,
'source_dest_check': interface.source_dest_check,
'groups': dict((group.id, group.name) for group in interface.groups),
}
if interface.attachment is not None:
interface_info['attachment'] = {'attachment_id': interface.attachment.id,
'instance_id': interface.attachment.instance_id,
'device_index': interface.attachment.device_index,
'status': interface.attachment.status,
'attach_time': interface.attachment.attach_time,
'delete_on_termination': interface.attachment.delete_on_termination,
}
return interface_info
def wait_for_eni(eni, status):
while True:
time.sleep(3)
eni.update()
# If the status is detached we just need attachment to disappear
if eni.attachment is None:
if status == "detached":
break
else:
if status == "attached" and eni.attachment.status == "attached":
break
def create_eni(connection, module):
instance_id = module.params.get("instance_id")
if instance_id == 'None':
instance_id = None
do_detach = True
else:
do_detach = False
device_index = module.params.get("device_index")
subnet_id = module.params.get('subnet_id')
private_ip_address = module.params.get('private_ip_address')
description = module.params.get('description')
security_groups = module.params.get('security_groups')
changed = False
try:
eni = compare_eni(connection, module)
if eni is None:
eni = connection.create_network_interface(subnet_id, private_ip_address, description, security_groups)
if instance_id is not None:
try:
eni.attach(instance_id, device_index)
except BotoServerError as ex:
eni.delete()
raise
changed = True
# Wait to allow creation / attachment to finish
wait_for_eni(eni, "attached")
eni.update()
except BotoServerError as e:
module.fail_json(msg=get_error_message(e.args[2]))
module.exit_json(changed=changed, interface=get_eni_info(eni))
def modify_eni(connection, module):
eni_id = module.params.get("eni_id")
instance_id = module.params.get("instance_id")
if instance_id == 'None':
instance_id = None
do_detach = True
else:
do_detach = False
device_index = module.params.get("device_index")
subnet_id = module.params.get('subnet_id')
private_ip_address = module.params.get('private_ip_address')
description = module.params.get('description')
security_groups = module.params.get('security_groups')
force_detach = module.params.get("force_detach")
source_dest_check = module.params.get("source_dest_check")
delete_on_termination = module.params.get("delete_on_termination")
changed = False
try:
# Get the eni with the eni_id specified
eni_result_set = connection.get_all_network_interfaces(eni_id)
eni = eni_result_set[0]
if description is not None:
if eni.description != description:
connection.modify_network_interface_attribute(eni.id, "description", description)
changed = True
if security_groups is not None:
if sorted(get_sec_group_list(eni.groups)) != sorted(security_groups):
connection.modify_network_interface_attribute(eni.id, "groupSet", security_groups)
changed = True
if source_dest_check is not None:
if eni.source_dest_check != source_dest_check:
connection.modify_network_interface_attribute(eni.id, "sourceDestCheck", source_dest_check)
changed = True
if delete_on_termination is not None:
if eni.attachment is not None:
if eni.attachment.delete_on_termination is not delete_on_termination:
connection.modify_network_interface_attribute(eni.id, "deleteOnTermination", delete_on_termination, eni.attachment.id)
changed = True
else:
module.fail_json(msg="Can not modify delete_on_termination as the interface is not attached")
if eni.attachment is not None and instance_id is None and do_detach is True:
eni.detach(force_detach)
wait_for_eni(eni, "detached")
changed = True
else:
if instance_id is not None:
eni.attach(instance_id, device_index)
wait_for_eni(eni, "attached")
changed = True
except BotoServerError as e:
module.fail_json(msg=get_error_message(e.args[2]))
eni.update()
module.exit_json(changed=changed, interface=get_eni_info(eni))
def delete_eni(connection, module):
eni_id = module.params.get("eni_id")
force_detach = module.params.get("force_detach")
try:
eni_result_set = connection.get_all_network_interfaces(eni_id)
eni = eni_result_set[0]
if force_detach is True:
if eni.attachment is not None:
eni.detach(force_detach)
# Wait to allow detachment to finish
wait_for_eni(eni, "detached")
eni.update()
eni.delete()
changed = True
else:
eni.delete()
changed = True
module.exit_json(changed=changed)
except BotoServerError as e:
msg = get_error_message(e.args[2])
regex = re.compile('The networkInterface ID \'.*\' does not exist')
if regex.search(msg) is not None:
module.exit_json(changed=False)
else:
module.fail_json(msg=get_error_message(e.args[2]))
def compare_eni(connection, module):
eni_id = module.params.get("eni_id")
subnet_id = module.params.get('subnet_id')
private_ip_address = module.params.get('private_ip_address')
description = module.params.get('description')
security_groups = module.params.get('security_groups')
try:
all_eni = connection.get_all_network_interfaces(eni_id)
for eni in all_eni:
remote_security_groups = get_sec_group_list(eni.groups)
if (eni.subnet_id == subnet_id) and (eni.private_ip_address == private_ip_address) and (eni.description == description) and (remote_security_groups == security_groups):
return eni
except BotoServerError as e:
module.fail_json(msg=get_error_message(e.args[2]))
return None
def get_sec_group_list(groups):
# Build list of remote security groups
remote_security_groups = []
for group in groups:
remote_security_groups.append(group.id.encode())
return remote_security_groups
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
eni_id = dict(default=None),
instance_id = dict(default=None),
private_ip_address = dict(),
subnet_id = dict(),
description = dict(),
security_groups = dict(type='list'),
device_index = dict(default=0, type='int'),
state = dict(default='present', choices=['present', 'absent']),
force_detach = dict(default='no', type='bool'),
source_dest_check = dict(default=None, type='bool'),
delete_on_termination = dict(default=None, type='bool')
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if region:
try:
connection = connect_to_aws(boto.ec2, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, StandardError), e:
module.fail_json(msg=str(e))
else:
module.fail_json(msg="region must be specified")
state = module.params.get("state")
eni_id = module.params.get("eni_id")
if state == 'present':
if eni_id is None:
if module.params.get("subnet_id") is None:
module.fail_json(msg="subnet_id must be specified when state=present")
create_eni(connection, module)
else:
modify_eni(connection, module)
elif state == 'absent':
if eni_id is None:
module.fail_json(msg="eni_id must be specified")
else:
delete_eni(connection, module)
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
# this is magic, see lib/ansible/module_common.py
#<<INCLUDE_ANSIBLE_MODULE_COMMON>>
main()
|
gpl-3.0
|
sam-m888/gramps
|
gramps/gui/pluginmanager.py
|
6
|
8526
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2005 Donald N. Allingham
# Copyright (C) 2008 Brian G. Matherly
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
The core of the Gramps plugin system. This module provides capability to load
plugins from specified directories and provide information about the loaded
plugins.
Plugins are divided into several categories. These are: reports, tools,
importers, exporters, quick reports, and document generators.
"""
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
import os
from gi.repository import Gtk, GdkPixbuf, Gdk
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from gramps.gen.utils.callback import Callback
from gramps.gen.plug import BasePluginManager, PluginRegister
from gramps.gen.constfunc import win
from gramps.gen.config import config
from gramps.gen.const import ICON
#-------------------------------------------------------------------------
#
# GuiPluginManager
#
#-------------------------------------------------------------------------
class GuiPluginManager(Callback):
""" PluginManager is a Singleton which manages plugins.
    It is the GUI implementation using a unique BasePluginManager.
This class adds the possibility to hide plugins in the GUI via a config
setting
"""
__instance = None
__signals__ = { 'plugins-reloaded' : None }
def get_instance():
""" Use this function to get the instance of the PluginManager """
if GuiPluginManager.__instance is None:
GuiPluginManager.__instance = 1 # Set to 1 for __init__()
GuiPluginManager.__instance = GuiPluginManager()
return GuiPluginManager.__instance
get_instance = staticmethod(get_instance)
def __init__(self):
""" This function should only be run once by get_instance() """
        if GuiPluginManager.__instance != 1:
raise Exception("This class is a singleton. "
"Use the get_instance() method")
Callback.__init__(self)
self.basemgr = BasePluginManager.get_instance()
self.__hidden_plugins = set(config.get('plugin.hiddenplugins'))
# self.__hidden_changed() # See bug 9561
def load_plugin(self, pdata):
if not self.is_loaded(pdata.id):
#load stock icons before import, only gui needs this
if pdata.icons:
if pdata.icondir and os.path.isdir(pdata.icondir):
dir = pdata.icondir
else:
#use the plugin directory
dir = pdata.directory
# Append icon directory to the theme search path
theme = Gtk.IconTheme.get_default()
theme.append_search_path(dir)
return self.basemgr.load_plugin(pdata)
def reload_plugins(self):
self.basemgr.reload_plugins()
self.emit('plugins-reloaded')
def __getattr__(self, name):
return getattr(self.basemgr, name)
def __hidden_changed(self, *args):
#if hidden changed, stored data must be emptied as it could contain
#something that now must be hidden
self.empty_managed_plugins()
#objects that need to know if the plugins available changed, are
#listening to this signal to update themselves. If a plugin becomes
#(un)hidden, this should happen, so we emit.
self.emit('plugins-reloaded')
def get_hidden_plugin_ids(self):
"""
        Return a copy of the set of hidden plugin ids
"""
return self.__hidden_plugins.copy()
def hide_plugin(self, id):
""" Hide plugin with given id. This will hide the plugin so queries do
not return it anymore, and write this change to the config.
Note that config will then emit a signal
"""
self.__hidden_plugins.add(id)
config.set('plugin.hiddenplugins', list(self.__hidden_plugins))
config.save()
self.__hidden_changed()
def unhide_plugin(self, id):
""" Unhide plugin with given id. This will unhide the plugin so queries
return it again, and write this change to the config
"""
self.__hidden_plugins.remove(id)
config.set('plugin.hiddenplugins', list(self.__hidden_plugins))
config.save()
self.__hidden_changed()
def get_reg_reports(self, gui=True):
""" Return list of non hidden registered reports
:Param gui: bool indicating if GUI reports or CLI reports must be
returned
"""
return [plg for plg in self.basemgr.get_reg_reports(gui)
if plg.id not in self.__hidden_plugins]
def get_reg_tools(self, gui=True):
""" Return list of non hidden registered tools
:Param gui: bool indicating if GUI reports or CLI reports must be
returned
"""
return [plg for plg in self.basemgr.get_reg_tools(gui)
if plg.id not in self.__hidden_plugins]
def get_reg_views(self):
""" Return list of non hidden registered views
"""
return [plg for plg in self.basemgr.get_reg_views()
if plg.id not in self.__hidden_plugins]
def get_reg_quick_reports(self):
""" Return list of non hidden registered quick reports
"""
return [plg for plg in self.basemgr.get_reg_quick_reports()
if plg.id not in self.__hidden_plugins]
def get_reg_mapservices(self):
""" Return list of non hidden registered mapservices
"""
return [plg for plg in self.basemgr.get_reg_mapservices()
if plg.id not in self.__hidden_plugins]
def get_reg_bookitems(self):
""" Return list of non hidden reports registered as bookitem
"""
return [plg for plg in self.basemgr.get_reg_bookitems()
if plg.id not in self.__hidden_plugins]
def get_reg_gramplets(self):
""" Return list of non hidden reports registered as bookitem
"""
return [plg for plg in self.basemgr.get_reg_gramplets()
if plg.id not in self.__hidden_plugins]
def get_reg_sidebars(self):
""" Return list of non hidden registered sidebars
"""
return [plg for plg in self.basemgr.get_reg_sidebars()
if plg.id not in self.__hidden_plugins]
def get_reg_importers(self):
""" Return list of registered importers
"""
return [plg for plg in self.basemgr.get_reg_importers()
if plg.id not in self.__hidden_plugins]
def get_reg_exporters(self):
""" Return list of registered exporters
"""
return [plg for plg in self.basemgr.get_reg_exporters()
if plg.id not in self.__hidden_plugins]
def get_reg_docgens(self):
""" Return list of registered docgen
"""
return [plg for plg in self.basemgr.get_reg_docgens()
if plg.id not in self.__hidden_plugins]
def get_reg_databases(self):
""" Return list of non hidden registered database backends
"""
return [plg for plg in self.basemgr.get_reg_databases()
if plg.id not in self.__hidden_plugins]
def get_reg_general(self, category=None):
return [plg for plg in self.basemgr.get_reg_general(category)
if plg.id not in self.__hidden_plugins]
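# A minimal usage sketch of the singleton and the hide/unhide round trip
# described above (the plugin id is hypothetical):
#
#     pmgr = GuiPluginManager.get_instance()
#     pmgr.hide_plugin('gramplet_welcome')
#     assert 'gramplet_welcome' in pmgr.get_hidden_plugin_ids()
#     pmgr.unhide_plugin('gramplet_welcome')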
|
gpl-2.0
|
insidenothing/3D-Printing-Software
|
skein_engines/skeinforge-50/skeinforge_application/skeinforge_utilities/skeinforge_help.py
|
11
|
3508
|
"""
Help has buttons and menu items to open help, blog and forum pages in your primary browser.
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to work around the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from fabmetheus_utilities import archive
from fabmetheus_utilities import settings
from skeinforge_application.skeinforge_utilities import skeinforge_profile
__author__ = 'Enrique Perez ([email protected])'
__date__ = '$Date: 2008/21/04 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def getNewRepository():
'Get new repository.'
return HelpRepository()
class HelpRepository:
"A class to handle the help settings."
def __init__(self):
"Set the default settings, execute title & settings fileName."
skeinforge_profile.addListsToCraftTypeRepository('skeinforge_application.skeinforge_utilities.skeinforge_help.html', self)
announcementsText = '- Announcements - '
announcementsLabel = settings.LabelDisplay().getFromName(announcementsText, self )
announcementsLabel.columnspan = 6
settings.LabelDisplay().getFromName('Fabmetheus Blog, Announcements & Questions:', self )
settings.HelpPage().getFromNameAfterHTTP('fabmetheus.blogspot.com/', 'Fabmetheus Blog', self )
settings.LabelSeparator().getFromRepository(self)
settings.LabelDisplay().getFromName('- Documentation -', self )
settings.LabelDisplay().getFromName('Local Documentation Table of Contents: ', self )
settings.HelpPage().getFromNameSubName('Contents', self, 'contents.html')
settings.LabelDisplay().getFromName('Wiki Manual with Pictures & Charts: ', self )
settings.HelpPage().getFromNameAfterHTTP('fabmetheus.crsndoo.com/wiki/index.php/Skeinforge', 'Wiki Manual', self )
settings.LabelDisplay().getFromName('Skeinforge Overview: ', self )
settings.HelpPage().getFromNameSubName('Skeinforge Overview', self, 'skeinforge_application.skeinforge.html')
settings.LabelSeparator().getFromRepository(self)
settings.LabelDisplay().getFromName('- Search -', self )
settings.LabelDisplay().getFromName('Reprap Search:', self )
settings.HelpPage().getFromNameAfterHTTP('members.axion.net/~enrique/search_reprap.html', 'Reprap Search', self )
settings.LabelDisplay().getFromName('Skeinforge Search:', self )
settings.HelpPage().getFromNameAfterHTTP('members.axion.net/~enrique/search_skeinforge.html', 'Skeinforge Search', self )
settings.LabelDisplay().getFromName('Web Search:', self )
settings.HelpPage().getFromNameAfterHTTP('members.axion.net/~enrique/search_web.html', 'Web Search', self )
settings.LabelSeparator().getFromRepository(self)
settings.LabelDisplay().getFromName('- Troubleshooting -', self )
settings.LabelDisplay().getFromName('Skeinforge Forum:', self)
settings.HelpPage().getFromNameAfterHTTP('forums.reprap.org/list.php?154', ' Skeinforge Forum ', self )
settings.LabelSeparator().getFromRepository(self)
self.version = settings.LabelDisplay().getFromName('Version: ' + archive.getFileText(archive.getVersionFileName()), self)
self.wikiManualPrimary = settings.BooleanSetting().getFromValue('Wiki Manual Primary', self, True )
self.wikiManualPrimary.setUpdateFunction( self.save )
def save(self):
"Write the entities."
settings.writeSettingsPrintMessage(self)
|
gpl-2.0
|
otherness-space/myProject
|
my_project_001/lib/python2.7/site-packages/wheel/test/test_signatures.py
|
565
|
1120
|
from wheel import signatures
from wheel.signatures import djbec, ed25519py
from wheel.util import binary
def test_getlib():
signatures.get_ed25519ll()
def test_djbec():
djbec.dsa_test()
djbec.dh_test()
def test_ed25519py():
kp0 = ed25519py.crypto_sign_keypair(binary(' '*32))
kp = ed25519py.crypto_sign_keypair()
signed = ed25519py.crypto_sign(binary('test'), kp.sk)
ed25519py.crypto_sign_open(signed, kp.vk)
try:
ed25519py.crypto_sign_open(signed, kp0.vk)
except ValueError:
pass
else:
raise Exception("Expected ValueError")
try:
ed25519py.crypto_sign_keypair(binary(' '*33))
except ValueError:
pass
else:
raise Exception("Expected ValueError")
try:
ed25519py.crypto_sign(binary(''), binary(' ')*31)
except ValueError:
pass
else:
raise Exception("Expected ValueError")
try:
ed25519py.crypto_sign_open(binary(''), binary(' ')*31)
except ValueError:
pass
else:
raise Exception("Expected ValueError")
|
mit
|
monikagrabowska/osf.io
|
api/collections/serializers.py
|
4
|
4546
|
from django.db import IntegrityError
from rest_framework import serializers as ser
from rest_framework import exceptions
from framework.exceptions import PermissionsError
from website.models import Node
from osf.models import Collection
from osf.exceptions import ValidationError
from api.base.serializers import LinksField, RelationshipField
from api.base.serializers import JSONAPISerializer, IDField, TypeField, DateByVersion
from api.base.exceptions import InvalidModelValueError
from api.base.utils import absolute_reverse, get_user_auth
from api.nodes.serializers import NodeLinksSerializer
class CollectionSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'title',
'date_created',
'date_modified',
])
id = IDField(source='_id', read_only=True)
type = TypeField()
title = ser.CharField(required=True)
date_created = DateByVersion(read_only=True)
date_modified = DateByVersion(read_only=True)
bookmarks = ser.BooleanField(read_only=False, default=False, source='is_bookmark_collection')
links = LinksField({})
node_links = RelationshipField(
related_view='collections:node-pointers',
related_view_kwargs={'collection_id': '<_id>'},
related_meta={'count': 'get_node_links_count'}
)
# TODO: Add a self link to this when it's available
linked_nodes = RelationshipField(
related_view='collections:linked-nodes',
related_view_kwargs={'collection_id': '<_id>'},
related_meta={'count': 'get_node_links_count'},
self_view='collections:collection-node-pointer-relationship',
self_view_kwargs={'collection_id': '<_id>'}
)
linked_registrations = RelationshipField(
related_view='collections:linked-registrations',
related_view_kwargs={'collection_id': '<_id>'},
related_meta={'count': 'get_registration_links_count'},
self_view='collections:collection-registration-pointer-relationship',
self_view_kwargs={'collection_id': '<_id>'}
)
class Meta:
type_ = 'collections'
def get_absolute_url(self, obj):
return absolute_reverse('collections:collection-detail', kwargs={
'collection_id': obj._id,
'version': self.context['request'].parser_context['kwargs']['version']
})
def get_node_links_count(self, obj):
count = 0
auth = get_user_auth(self.context['request'])
for pointer in obj.linked_nodes.filter(is_deleted=False, type='osf.node'):
if pointer.can_view(auth):
count += 1
return count
def get_registration_links_count(self, obj):
count = 0
auth = get_user_auth(self.context['request'])
for pointer in obj.linked_nodes.filter(is_deleted=False, type='osf.registration'):
if pointer.can_view(auth):
count += 1
return count
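    # The two counters above differ only in the linked-node type. A shared
    # helper would read roughly as follows (a sketch; the helper name is
    # hypothetical and not part of this serializer):
    #
    #     def _count_viewable_links(self, obj, node_type):
    #         auth = get_user_auth(self.context['request'])
    #         return sum(1 for pointer in
    #                    obj.linked_nodes.filter(is_deleted=False, type=node_type)
    #                    if pointer.can_view(auth))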
def create(self, validated_data):
node = Collection(**validated_data)
node.category = ''
try:
node.save()
except ValidationError as e:
raise InvalidModelValueError(detail=e.messages[0])
except IntegrityError:
raise ser.ValidationError('Each user cannot have more than one Bookmark collection.')
return node
def update(self, node, validated_data):
"""Update instance with the validated data. Requires
the request to be in the serializer context.
"""
assert isinstance(node, Node), 'collection must be a Node'
auth = get_user_auth(self.context['request'])
if validated_data:
try:
node.update(validated_data, auth=auth)
except ValidationError as e:
raise InvalidModelValueError(detail=e.messages[0])
except PermissionsError:
raise exceptions.PermissionDenied
return node
class CollectionDetailSerializer(CollectionSerializer):
"""
Overrides CollectionSerializer to make id required.
"""
id = IDField(source='_id', required=True)
class CollectionNodeLinkSerializer(NodeLinksSerializer):
def get_absolute_url(self, obj):
return absolute_reverse(
'collections:node-pointer-detail',
kwargs={
'collection_id': self.context['request'].parser_context['kwargs']['collection_id'],
'node_link_id': obj._id,
'version': self.context['request'].parser_context['kwargs']['version']
}
)
|
apache-2.0
|
ThiefMaster/indico
|
indico/modules/events/registration/lists.py
|
4
|
8870
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import request
from sqlalchemy.orm import joinedload
from indico.core.db import db
from indico.modules.events.registration.models.form_fields import RegistrationFormFieldData
from indico.modules.events.registration.models.items import PersonalDataType, RegistrationFormItem
from indico.modules.events.registration.models.registrations import Registration, RegistrationData, RegistrationState
from indico.modules.events.util import ListGeneratorBase
from indico.util.i18n import _
from indico.web.flask.templating import get_template_module
class RegistrationListGenerator(ListGeneratorBase):
"""Listing and filtering actions in the registration list."""
endpoint = '.manage_reglist'
list_link_type = 'registration'
def __init__(self, regform):
super().__init__(regform.event, entry_parent=regform)
self.regform = regform
self.default_list_config = {
'items': ('title', 'email', 'affiliation', 'reg_date', 'state'),
'filters': {'fields': {}, 'items': {}}
}
self.static_items = {
'reg_date': {
'title': _('Registration Date'),
},
'price': {
'title': _('Price'),
},
'state': {
'title': _('State'),
'filter_choices': {str(state.value): state.title for state in RegistrationState}
},
'checked_in': {
'title': _('Checked in'),
'filter_choices': {
'0': _('No'),
'1': _('Yes')
}
},
'checked_in_date': {
'title': _('Check-in date'),
},
'payment_date': {
'title': _('Payment date'),
},
}
self.personal_items = ('title', 'first_name', 'last_name', 'email', 'position', 'affiliation', 'address',
'phone', 'country')
self.list_config = self._get_config()
def _get_static_columns(self, ids):
"""
Retrieve information needed for the header of the static
columns (including static and personal items).
:return: a list of {'id': ..., 'caption': ...} dicts
"""
result = []
for item_id in ids:
if item_id in self.personal_items:
field = RegistrationFormItem.query.filter_by(registration_form=self.regform,
personal_data_type=PersonalDataType[item_id]).one()
result.append({
'id': field.id,
'caption': field.title
})
elif item_id in self.static_items:
result.append({
'id': item_id,
'caption': self.static_items[item_id]['title']
})
return result
def _column_ids_to_db(self, ids):
"""Translate string-based ids to DB-based RegistrationFormItem ids."""
result = []
personal_data_field_ids = {x.personal_data_type: x.id for x in self.regform.form_items if x.is_field}
for item_id in ids:
if isinstance(item_id, str):
personal_data_type = PersonalDataType.get(item_id)
if personal_data_type:
item_id = personal_data_field_ids[personal_data_type]
result.append(item_id)
return result
def _get_sorted_regform_items(self, item_ids):
"""
Return the form items ordered by their position in the registration form.
"""
if not item_ids:
return []
return (RegistrationFormItem.query
.filter(~RegistrationFormItem.is_deleted, RegistrationFormItem.id.in_(item_ids))
.with_parent(self.regform)
.join(RegistrationFormItem.parent, aliased=True)
.filter(~RegistrationFormItem.is_deleted) # parent deleted
.order_by(RegistrationFormItem.position) # parent position
.reset_joinpoint()
.order_by(RegistrationFormItem.position) # item position
.all())
def _get_filters_from_request(self):
filters = super()._get_filters_from_request()
for field in self.regform.form_items:
if field.is_field and field.input_type in {'single_choice', 'multi_choice', 'country', 'bool', 'checkbox'}:
options = request.form.getlist(f'field_{field.id}')
if options:
filters['fields'][str(field.id)] = options
return filters
def _build_query(self):
return (Registration.query
.with_parent(self.regform)
.filter(~Registration.is_deleted)
.options(joinedload('data').joinedload('field_data').joinedload('field'))
.order_by(db.func.lower(Registration.last_name), db.func.lower(Registration.first_name)))
def _filter_list_entries(self, query, filters):
if not (filters.get('fields') or filters.get('items')):
return query
field_types = {str(f.id): f.field_impl for f in self.regform.form_items
if f.is_field and not f.is_deleted and (f.parent_id is None or not f.parent.is_deleted)}
field_filters = {field_id: data_list
for field_id, data_list in filters['fields'].items()
if field_id in field_types}
if not field_filters and not filters['items']:
return query
criteria = [db.and_(RegistrationFormFieldData.field_id == field_id,
field_types[field_id].create_sql_filter(data_list))
for field_id, data_list in field_filters.items()]
items_criteria = []
if 'checked_in' in filters['items']:
checked_in_values = filters['items']['checked_in']
# If both values 'true' and 'false' are selected, there's no point in filtering
if len(checked_in_values) == 1:
items_criteria.append(Registration.checked_in == bool(int(checked_in_values[0])))
if 'state' in filters['items']:
states = [RegistrationState(int(state)) for state in filters['items']['state']]
items_criteria.append(Registration.state.in_(states))
if field_filters:
subquery = (RegistrationData.query
.with_entities(db.func.count(RegistrationData.registration_id))
.join(RegistrationData.field_data)
.filter(RegistrationData.registration_id == Registration.id)
.filter(db.or_(*criteria))
.correlate(Registration)
.scalar_subquery())
query = query.filter(subquery == len(field_filters))
return query.filter(db.or_(*items_criteria))
def get_list_kwargs(self):
reg_list_config = self._get_config()
registrations_query = self._build_query()
total_entries = registrations_query.count()
registrations = self._filter_list_entries(registrations_query, reg_list_config['filters']).all()
dynamic_item_ids, static_item_ids = self._split_item_ids(reg_list_config['items'], 'dynamic')
static_columns = self._get_static_columns(static_item_ids)
regform_items = self._get_sorted_regform_items(dynamic_item_ids)
return {
'regform': self.regform,
'registrations': registrations,
'total_registrations': total_entries,
'static_columns': static_columns,
'dynamic_columns': regform_items,
'filtering_enabled': total_entries != len(registrations)
}
def get_list_export_config(self):
static_item_ids, item_ids = self.get_item_ids()
return {
'static_item_ids': static_item_ids,
'regform_items': self._get_sorted_regform_items(item_ids)
}
def get_item_ids(self):
reg_list_config = self._get_config()
static_item_ids, item_ids = self._split_item_ids(reg_list_config['items'], 'static')
return static_item_ids, self._column_ids_to_db(item_ids)
def render_list(self):
reg_list_kwargs = self.get_list_kwargs()
tpl = get_template_module('events/registration/management/_reglist.html')
filtering_enabled = reg_list_kwargs.pop('filtering_enabled')
return {
'html': tpl.render_registration_list(**reg_list_kwargs),
'filtering_enabled': filtering_enabled
}
|
mit
|
gmimano/commcaretest
|
corehq/apps/api/models.py
|
2
|
2686
|
from couchdbkit.exceptions import ResourceNotFound
from couchdbkit.ext.django.schema import *
from django.contrib.auth.models import check_password
from django.http import HttpResponse
from django.conf import settings
import os
from corehq.util.hash_compat import make_password
PERMISSION_POST_SMS = "POST_SMS"
PERMISSION_POST_WISEPILL = "POST_WISEPILL"
class ApiUser(Document):
password = StringProperty()
permissions = ListProperty(StringProperty)
@property
def username(self):
if self['_id'].startswith("ApiUser-"):
return self['_id'][len("ApiUser-"):]
else:
raise Exception("ApiUser _id has to be 'ApiUser-' + username")
def set_password(self, raw_password):
salt = os.urandom(5).encode('hex')
self.password = make_password(raw_password, salt=salt)
def check_password(self, raw_password):
return check_password(raw_password, self.password)
def has_permission(self, permission):
return permission in self.permissions
@classmethod
def create(cls, username, password, permissions=None):
"""
To create a new ApiUser on the server:
./manage.py shell
$ from corehq.apps.api.models import *
$ ApiUser.create('buildserver', 'RANDOM').save()
"""
self = cls()
self['_id'] = "ApiUser-%s" % username
self.set_password(password)
self.permissions = permissions or []
return self
@classmethod
def get_user(cls, username):
return cls.get("ApiUser-%s" % username)
@classmethod
def auth(cls, username, password, permission=None):
try:
user = cls.get_user(username)
if user.check_password(password):
if permission is not None:
return user.has_permission(permission)
else:
return True
else:
return False
except ResourceNotFound:
return False
def _require_api_user(permission=None):
def _outer2(fn):
from django.views.decorators.http import require_POST
if settings.DEBUG:
return fn
@require_POST
def _outer(request, *args, **kwargs):
if ApiUser.auth(request.POST.get('username', ''), request.POST.get('password', ''), permission):
response = fn(request, *args, **kwargs)
else:
response = HttpResponse()
response.status_code = 401
return response
return _outer
return _outer2
require_api_user = _require_api_user()
require_api_user_permission = _require_api_user
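# Usage sketch for the decorator factory above (the view function is
# hypothetical): wrap a Django view so it requires a valid ApiUser with the
# given permission, supplied via POSTed 'username'/'password' fields.
#
#     @require_api_user_permission(PERMISSION_POST_SMS)
#     def post_sms(request):
#         ...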
|
bsd-3-clause
|
raccoongang/edx-platform
|
openedx/core/djangoapps/coursegraph/tasks.py
|
3
|
12763
|
"""
This file contains a management command for exporting the modulestore to
neo4j, a graph database.
"""
from __future__ import unicode_literals, print_function
import logging
from celery import task
from django.conf import settings
from django.utils import six, timezone
from opaque_keys.edx.keys import CourseKey
from py2neo import Graph, Node, Relationship, authenticate, NodeSelector
from py2neo.compat import integer, string, unicode as neo4j_unicode
from request_cache.middleware import RequestCache
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.store_utilities import DETACHED_XBLOCK_TYPES
from openedx.core.djangoapps.content.course_structures.models import CourseStructure
log = logging.getLogger(__name__)
celery_log = logging.getLogger('edx.celery.task')
# When testing locally, neo4j's bolt logger was noisy, so we'll only have it
# emit logs if there's an error.
bolt_log = logging.getLogger('neo4j.bolt') # pylint: disable=invalid-name
bolt_log.setLevel(logging.ERROR)
PRIMITIVE_NEO4J_TYPES = (integer, string, neo4j_unicode, float, bool)
def serialize_item(item):
"""
Args:
item: an XBlock
Returns:
fields: a dictionary of an XBlock's field names and values
block_type: the name of the XBlock's type (i.e. 'course'
or 'problem')
"""
# convert all fields to a dict and filter out parent and children field
fields = dict(
(field, field_value.read_from(item))
for (field, field_value) in six.iteritems(item.fields)
if field not in ['parent', 'children']
)
course_key = item.scope_ids.usage_id.course_key
block_type = item.scope_ids.block_type
# set or reset some defaults
fields['edited_on'] = six.text_type(getattr(item, 'edited_on', ''))
fields['display_name'] = item.display_name_with_default
fields['org'] = course_key.org
fields['course'] = course_key.course
fields['run'] = course_key.run
fields['course_key'] = six.text_type(course_key)
fields['location'] = six.text_type(item.location)
fields['block_type'] = block_type
fields['detached'] = block_type in DETACHED_XBLOCK_TYPES
if block_type == 'course':
# prune the checklists field
if 'checklists' in fields:
del fields['checklists']
# record the time this command was run
fields['time_last_dumped_to_neo4j'] = six.text_type(timezone.now())
return fields, block_type
def coerce_types(value):
"""
Args:
value: the value of an xblock's field
Returns: either the value, a text version of the value, or, if the
value is a list, a list where each element is converted to text.
"""
coerced_value = value
if isinstance(value, list):
coerced_value = [six.text_type(element) for element in coerced_value]
# if it's not one of the types that neo4j accepts,
# just convert it to text
elif not isinstance(value, PRIMITIVE_NEO4J_TYPES):
coerced_value = six.text_type(value)
return coerced_value
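# coerce_types() in action (a sketch derived from the branches above):
#
#     coerce_types([1, 'a'])   # -> [u'1', u'a']   (elements coerced to text)
#     coerce_types({'k': 1})   # -> u"{'k': 1}"    (not a neo4j primitive)
#     coerce_types(3.5)        # -> 3.5            (float is already accepted)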
def add_to_transaction(neo4j_entities, transaction):
"""
Args:
neo4j_entities: a list of Nodes or Relationships
transaction: a neo4j transaction
"""
for entity in neo4j_entities:
transaction.create(entity)
def get_command_last_run(course_key, graph):
"""
This information is stored on the course node of a course in neo4j
Args:
course_key: a CourseKey
graph: a py2neo Graph
Returns: The datetime that the command was last run, converted into
text, or None, if there's no record of this command last being run.
"""
selector = NodeSelector(graph)
course_node = selector.select(
"course",
course_key=six.text_type(course_key)
).first()
last_this_command_was_run = None
if course_node:
last_this_command_was_run = course_node['time_last_dumped_to_neo4j']
return last_this_command_was_run
def get_course_last_published(course_key):
"""
We use the CourseStructure table to get when this course was last
published.
Args:
course_key: a CourseKey
Returns: The datetime the course was last published at, converted into
text, or None, if there's no record of the last time this course
was published.
"""
try:
structure = CourseStructure.objects.get(course_id=course_key)
course_last_published_date = six.text_type(structure.modified)
except CourseStructure.DoesNotExist:
course_last_published_date = None
return course_last_published_date
def serialize_course(course_id):
"""
Serializes a course into py2neo Nodes and Relationships
Args:
course_id: CourseKey of the course we want to serialize
Returns:
nodes: a list of py2neo Node objects
relationships: a list of py2neo Relationships objects
"""
# create a location to node mapping we'll need later for
# writing relationships
location_to_node = {}
items = modulestore().get_items(course_id)
# create nodes
for item in items:
fields, block_type = serialize_item(item)
for field_name, value in six.iteritems(fields):
fields[field_name] = coerce_types(value)
node = Node(block_type, 'item', **fields)
location_to_node[item.location] = node
# create relationships
relationships = []
for item in items:
previous_child_node = None
for index, child_loc in enumerate(item.get_children()):
            parent_node = location_to_node.get(item.location)
            child_node = location_to_node.get(child_loc.location)
            if parent_node is not None and child_node is not None:
                # Set the ordering index only once we know the child node
                # exists; indexing into None would raise a TypeError.
                child_node["index"] = index
                relationship = Relationship(parent_node, "PARENT_OF", child_node)
                relationships.append(relationship)
if previous_child_node:
ordering_relationship = Relationship(
previous_child_node,
"PRECEDES",
child_node,
)
relationships.append(ordering_relationship)
previous_child_node = child_node
nodes = location_to_node.values()
return nodes, relationships
def should_dump_course(course_key, graph):
"""
Only dump the course if it's been changed since the last time it's been
dumped.
Args:
course_key: a CourseKey object.
graph: a py2neo Graph object.
Returns: bool of whether this course should be dumped to neo4j.
"""
last_this_command_was_run = get_command_last_run(course_key, graph)
course_last_published_date = get_course_last_published(course_key)
# if we don't have a record of the last time this command was run,
# we should serialize the course and dump it
if last_this_command_was_run is None:
return True
# if we've serialized the course recently and we have no published
# events, we will not dump it, and so we can skip serializing it
# again here
if last_this_command_was_run and course_last_published_date is None:
return False
# otherwise, serialize and dump the course if the command was run
# before the course's last published event
return last_this_command_was_run < course_last_published_date
@task(routing_key=settings.COURSEGRAPH_JOB_QUEUE)
def dump_course_to_neo4j(course_key_string, credentials):
"""
Serializes a course and writes it to neo4j.
Arguments:
        course_key_string: string course key for the course to be exported
        credentials (dict): the necessary credentials to connect
            to neo4j and create a py2neo `Graph` object
"""
course_key = CourseKey.from_string(course_key_string)
nodes, relationships = serialize_course(course_key)
celery_log.info(
"Now dumping %s to neo4j: %d nodes and %d relationships",
course_key,
len(nodes),
len(relationships),
)
graph = authenticate_and_create_graph(credentials)
transaction = graph.begin()
course_string = six.text_type(course_key)
try:
# first, delete existing course
transaction.run(
"MATCH (n:item) WHERE n.course_key='{}' DETACH DELETE n".format(
course_string
)
)
# now, re-add it
add_to_transaction(nodes, transaction)
add_to_transaction(relationships, transaction)
transaction.commit()
celery_log.info("Completed dumping %s to neo4j", course_key)
except Exception: # pylint: disable=broad-except
celery_log.exception(
"Error trying to dump course %s to neo4j, rolling back",
course_string
)
transaction.rollback()
class ModuleStoreSerializer(object):
"""
Class with functionality to serialize a modulestore into subgraphs,
one graph per course.
"""
def __init__(self, course_keys):
self.course_keys = course_keys
@classmethod
def create(cls, courses=None, skip=None):
"""
Sets the object's course_keys attribute from the `courses` parameter.
If that parameter isn't furnished, loads all course_keys from the
modulestore.
Filters out course_keys in the `skip` parameter, if provided.
Args:
courses: A list of string serializations of course keys.
For example, ["course-v1:org+course+run"].
skip: Also a list of string serializations of course keys.
"""
if courses:
course_keys = [CourseKey.from_string(course.strip()) for course in courses]
else:
course_keys = [
course.id for course in modulestore().get_course_summaries()
]
if skip is not None:
skip_keys = [CourseKey.from_string(course.strip()) for course in skip]
course_keys = [course_key for course_key in course_keys if course_key not in skip_keys]
return cls(course_keys)
def dump_courses_to_neo4j(self, credentials, override_cache=False):
"""
Method that iterates through a list of courses in a modulestore,
serializes them, then submits tasks to write them to neo4j.
Arguments:
credentials (dict): the necessary credentials to connect
to neo4j and create a py2neo `Graph` object
            override_cache: serialize the courses even if they've been recently
serialized
Returns: two lists--one of the courses that were successfully written
to neo4j and one of courses that were not.
"""
total_number_of_courses = len(self.course_keys)
submitted_courses = []
skipped_courses = []
graph = authenticate_and_create_graph(credentials)
for index, course_key in enumerate(self.course_keys):
# first, clear the request cache to prevent memory leaks
RequestCache.clear_request_cache()
log.info(
"Now submitting %s for export to neo4j: course %d of %d total courses",
course_key,
index + 1,
total_number_of_courses,
)
if not (override_cache or should_dump_course(course_key, graph)):
log.info("skipping submitting %s, since it hasn't changed", course_key)
skipped_courses.append(six.text_type(course_key))
continue
dump_course_to_neo4j.apply_async(
args=[six.text_type(course_key), credentials],
)
submitted_courses.append(six.text_type(course_key))
return submitted_courses, skipped_courses
def authenticate_and_create_graph(credentials):
"""
This function authenticates with neo4j and creates a py2neo graph object
Arguments:
credentials (dict): a dictionary of credentials used to authenticate,
and then create, a py2neo graph object.
Returns: a py2neo `Graph` object.
"""
host = credentials['host']
https_port = credentials['https_port']
http_port = credentials['http_port']
secure = credentials['secure']
neo4j_user = credentials['user']
neo4j_password = credentials['password']
authenticate(
"{host}:{port}".format(
host=host, port=https_port if secure else http_port
),
neo4j_user,
neo4j_password,
)
graph = Graph(
bolt=True,
password=neo4j_password,
user=neo4j_user,
https_port=https_port,
http_port=http_port,
host=host,
secure=secure,
)
return graph
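# Shape of the `credentials` dict expected by authenticate_and_create_graph()
# (keys taken from the reads above; the values here are hypothetical):
#
#     credentials = {
#         'host': 'localhost', 'user': 'neo4j', 'password': 'secret',
#         'http_port': 7474, 'https_port': 7473, 'secure': False,
#     }
#     graph = authenticate_and_create_graph(credentials)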
|
agpl-3.0
|
scith/htpc-manager_ynh
|
sources/modules/sickrage.py
|
2
|
9516
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import cherrypy
import htpc
from urllib import quote, urlencode
import requests
import logging
from cherrypy.lib.auth2 import require, member_of
from htpc.helpers import fix_basepath, get_image, striphttp
class Sickrage(object):
def __init__(self):
self.logger = logging.getLogger('modules.sickrage')
htpc.MODULES.append({
'name': 'Sickrage',
'id': 'sickrage',
'test': htpc.WEBDIR + 'sickrage/ping',
'fields': [
{'type': 'bool', 'label': 'Enable', 'name': 'sickrage_enable'},
{'type': 'text', 'label': 'Menu name', 'name': 'sickrage_name'},
{'type': 'text', 'label': 'IP / Host', 'placeholder': 'localhost', 'name': 'sickrage_host'},
{'type': 'text', 'label': 'Port', 'placeholder': '8081', 'name': 'sickrage_port'},
{'type': 'text', 'label': 'Basepath', 'placeholder': '/sickrage', 'name': 'sickrage_basepath'},
{'type': 'text', 'label': 'API key', 'name': 'sickrage_apikey'},
{'type': 'bool', 'label': 'Use SSL', 'name': 'sickrage_ssl'},
{'type': 'text', 'label': 'Reverse proxy link', 'placeholder': '', 'desc':'Reverse proxy link, e.g. https://sr.domain.com', 'name': 'sickrage_reverse_proxy_link'}
]
})
@cherrypy.expose()
@require()
def index(self):
return htpc.LOOKUP.get_template('sickrage.html').render(scriptname='sickrage', webinterface=self.webinterface())
def webinterface(self):
host = striphttp(htpc.settings.get('sickrage_host', ''))
port = str(htpc.settings.get('sickrage_port', ''))
apikey = htpc.settings.get('sickrage_apikey', '')
ssl = 's' if htpc.settings.get('sickrage_ssl', 0) else ''
sickrage_basepath = fix_basepath(htpc.settings.get('sickrage_basepath', '/'))
url = 'http%s://%s:%s%s' % (ssl, host, port, sickrage_basepath)
if htpc.settings.get('sickrage_reverse_proxy_link'):
url = htpc.settings.get('sickrage_reverse_proxy_link')
return url
@cherrypy.expose()
@require()
def view(self, indexerid):
        if not indexerid.isdigit():
            # Log before raising; statements after a raise never run.
            self.logger.error('Invalid show ID was supplied: ' + str(indexerid))
            raise cherrypy.HTTPError('500 Error', 'Invalid show ID.')
return htpc.LOOKUP.get_template('sickrage_view.html').render(scriptname='sickrage_view', indexerid=indexerid)
@cherrypy.expose()
@require(member_of(htpc.role_admin))
@cherrypy.tools.json_out()
def ping(self, sickrage_host, sickrage_port, sickrage_apikey, sickrage_basepath, sickrage_ssl=False, **kwargs):
ssl = 's' if sickrage_ssl else ''
self.logger.debug('Testing connectivity')
        sickrage_basepath = fix_basepath(sickrage_basepath)
        url = 'http%s://%s:%s%sapi/%s/?cmd=sb.ping' % (ssl, striphttp(sickrage_host), sickrage_port, sickrage_basepath, sickrage_apikey)
        try:
            self.logger.debug('Trying to contact sickrage via %s' % url)
            response = requests.get(url, timeout=10, verify=False)
            ret = response.json()
            if ret.get('result') == 'success':
                self.logger.debug('Sickrage connectivity test success')
                return ret
        except Exception:
            # The URL is built outside the try block so it is always bound here.
            self.logger.error('Unable to contact sickrage via %s' % url)
            return
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetShowList(self):
self.logger.debug('Fetching Show list')
return self.fetch('shows&sort=name', False, 200)
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetNextAired(self):
self.logger.debug('Fetching Next Aired Episodes')
return self.fetch('future')
@cherrypy.expose()
@require()
def GetBanner(self, indexerid):
self.logger.debug('Fetching Banner')
cherrypy.response.headers['Content-Type'] = 'image/jpeg'
return self.fetch('show.getbanner&indexerid=' + indexerid, True)
@cherrypy.expose()
@require()
def GetPoster(self, indexerid):
self.logger.debug('Fetching Poster')
cherrypy.response.headers['Content-Type'] = 'image/jpeg'
return self.fetch('show.getposter&indexerid=' + indexerid, True)
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetHistory(self, limit=''):
self.logger.debug('Fetching History')
return self.fetch('history&limit=' + limit)
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetLogs(self):
self.logger.debug('Fetching Logs')
return self.fetch('logs&min_level=info')
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def AddShow(self, indexername='', indexerid='', **kwargs):
# indexername=tvrageid or tvdbid
self.logger.debug('Adding a Show')
return self.fetch('show.addnew&' + urlencode(kwargs))
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetShow(self, indexerid):
self.logger.debug('Fetching Show')
return self.fetch('show&indexerid=' + indexerid)
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetEpisode(self, strShowID, strSeason, strEpisode):
return self.fetch('episode&indexerid=' + strShowID + '&season=' + strSeason + '&episode=' + strEpisode + '&full_path=1')
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def GetSeason(self, indexerid, season):
self.logger.debug('Fetching Season')
return self.fetch('show.seasons&indexerid=' + indexerid + '&season=' + season)
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def Postprocess(self, path='', force_replace=False, return_data=False, is_priority=False, type=False):
self.logger.debug('Postprocess')
if path:
path = '&%s' % path
return self.fetch('postprocess' + path, False, 120)
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def Restart(self):
self.logger.debug('Restart sr')
return self.fetch('sb.restart', False, 15)
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def SearchEpisodeDownload(self, indexerid, season, episode):
self.logger.debug('Fetching Episode Downloads')
return self.fetch('episode.search&indexerid=' + indexerid + '&season=' + season + '&episode=' + episode, False, 45)
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def SearchSubtitle(self, indexerid, season, episode):
self.logger.debug('Fetching subtitle')
return self.fetch('episode.subtitlesearch&indexerid=' + indexerid + '&season=' + season + '&episode=' + episode, False, 45)
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def Shutdown(self):
self.logger.debug('Shutdown sickrage')
return self.fetch('sb.shutdown', False, 20)
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def ForceFullUpdate(self, indexerid):
self.logger.debug('Force full update for indexerid %s' % indexerid)
return self.fetch('show.update&indexerid=' + indexerid)
@cherrypy.expose()
@require(member_of(htpc.role_user))
@cherrypy.tools.json_out()
def RescanFiles(self, indexerid):
self.logger.debug('Rescan all local files for indexerid %s' % indexerid)
return self.fetch('show.refresh&indexerid=' + indexerid)
@cherrypy.expose()
@cherrypy.tools.json_out()
@require(member_of(htpc.role_user))
def RemoveShow(self, indexerid, show_name=''):
self.logger.debug('Delete %s from Sickrage indexerid %s' % (show_name, indexerid))
return self.fetch('show.delete&indexerid=%s' % indexerid)
@cherrypy.expose()
@cherrypy.tools.json_out()
@require()
def SearchShow(self, query):
        self.logger.debug('Searching tvdb and tvrage for query %s' % query)
return self.fetch('sb.searchindexers&indexer=0&name=%s' % quote(query), False, 60)
@cherrypy.expose()
@require()
@cherrypy.tools.json_out()
def ShowsStats(self):
self.logger.debug('Grabbing tvrage statistics')
return self.fetch('shows.stats')
def fetch(self, cmd, img=False, timeout=20):
try:
host = striphttp(htpc.settings.get('sickrage_host', ''))
port = str(htpc.settings.get('sickrage_port', ''))
apikey = htpc.settings.get('sickrage_apikey', '')
ssl = 's' if htpc.settings.get('sickrage_ssl', 0) else ''
sickrage_basepath = fix_basepath(htpc.settings.get('sickrage_basepath', '/'))
url = 'http%s://%s:%s%sapi/%s/?cmd=%s' % (ssl, host, port, sickrage_basepath, apikey, cmd)
self.logger.debug('Fetching information from: %s' % url)
if img is True:
# Cache the images
return get_image(url)
res = requests.get(url, timeout=timeout, verify=False)
return res.json()
except Exception as e:
self.logger.error('Unable to fetch information')
self.logger.error(url)
self.logger.error(e)
return
|
gpl-3.0
|
NetApp/cinder
|
cinder/tests/unit/fake_utils.py
|
10
|
2735
|
# Copyright (c) 2011 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""This modules stubs out functions in cinder.utils."""
import re
from eventlet import greenthread
import six
_fake_execute_repliers = []
_fake_execute_log = []
def fake_execute_get_log():
return _fake_execute_log
def fake_execute_clear_log():
global _fake_execute_log
_fake_execute_log = []
def fake_execute_set_repliers(repliers):
"""Allows the client to configure replies to commands."""
global _fake_execute_repliers
_fake_execute_repliers = repliers
def fake_execute_default_reply_handler(*ignore_args, **ignore_kwargs):
"""A reply handler for commands that haven't been added to the reply list.
Returns empty strings for stdout and stderr.
"""
return '', ''
def fake_execute(*cmd_parts, **kwargs):
"""This function stubs out execute.
    It optionally executes a preconfigured function to return expected data.
"""
global _fake_execute_repliers
process_input = kwargs.get('process_input', None)
check_exit_code = kwargs.get('check_exit_code', 0)
delay_on_retry = kwargs.get('delay_on_retry', True)
attempts = kwargs.get('attempts', 1)
run_as_root = kwargs.get('run_as_root', False)
cmd_str = ' '.join(str(part) for part in cmd_parts)
_fake_execute_log.append(cmd_str)
reply_handler = fake_execute_default_reply_handler
for fake_replier in _fake_execute_repliers:
if re.match(fake_replier[0], cmd_str):
reply_handler = fake_replier[1]
break
if isinstance(reply_handler, six.string_types):
# If the reply handler is a string, return it as stdout
reply = reply_handler, ''
else:
# Alternative is a function, so call it
reply = reply_handler(cmd_parts,
process_input=process_input,
delay_on_retry=delay_on_retry,
attempts=attempts,
run_as_root=run_as_root,
check_exit_code=check_exit_code)
# Replicate the sleep call in the real function
greenthread.sleep(0)
return reply
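# Usage sketch, assuming a test wires fake_execute in place of
# cinder.utils.execute: configure a replier, run a command, inspect the log.
#
#     fake_execute_set_repliers([(r'^ls\b', 'file1\nfile2')])
#     stdout, stderr = fake_execute('ls', '-l')   # -> ('file1\nfile2', '')
#     assert 'ls -l' in fake_execute_get_log()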
|
apache-2.0
|
cbclab/MDT
|
mdt/model_building/signal_noise_models.py
|
1
|
1426
|
from .parameters import FreeParameter, CurrentModelSignalParam
from .model_functions import SimpleModelCLFunction
__author__ = 'Robbert Harms'
__date__ = "2014-08-05"
__license__ = "LGPL v3"
__maintainer__ = "Robbert Harms"
__email__ = "[email protected]"
class SignalNoiseModel(SimpleModelCLFunction):
"""Signal noise models can add noise to the signal resulting from the model.
They require the signal resulting from the model and zero or more parameters and they return a new signal
with noise added. This should have a model signature like:
.. code-block:: c
double fname(double signal, <noise model parameters ...>);
For example, if the noise model has only one parameter 'sigma' the function should look like:
.. code-block:: c
double fname(double signal, double sigma);
The CL function should return a single double that represents the signal with the signal noise
added to it.
"""
class JohnsonNoise(SignalNoiseModel):
def __init__(self):
"""Johnson noise adds noise to the signal using the formula:
.. code-block:: c
sqrt(signal^2 + eta^2)
"""
super().__init__(
'double', 'JohnsonNoise', [
CurrentModelSignalParam('signal'),
FreeParameter('mot_float_type eta', False, 0.1, 0, 1e5)],
'return hypot(signal, (double)eta);')
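# JohnsonNoise satisfies the contract documented on SignalNoiseModel: the
# resulting CL function takes the model signal plus one parameter (eta), and
# hypot(signal, eta) computes sqrt(signal^2 + eta^2) without intermediate
# overflow.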
|
lgpl-3.0
|
gsirow/fantasyfantasy
|
ff_project/settings.py
|
1
|
2017
|
"""
Django settings for ff_football project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'y&3xt@1$e8j(i5!z6)oy*mv*$&(agzyuotx88lnle+c)n@s3g2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'ff_football.ff_football'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'ff_football.urls'
WSGI_APPLICATION = 'ff_football.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
|
gpl-2.0
|
genova/rapidsms-senegal
|
build/lib.linux-i686-2.6/rapidsms/config.py
|
7
|
7691
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import os, log
from ConfigParser import SafeConfigParser
import logging
def to_list (item, separator=","):
return filter(None, map(lambda x: str(x).strip(), item.split(separator)))
class Config (object):
def __init__ (self, *paths):
self.parser = SafeConfigParser()
# read the configuration, and store the list of
# config files which were successfully parsed
self.sources = self.parser.read(paths)
self.raw_data = {}
self.normalized_data = {}
self.data = {}
# first pass: read in the raw data. it's all strings, since
# ConfigParser doesn't seem to decode unicode correctly (yet)
for sn in self.parser.sections():
items = self.parser.items(sn)
self.raw_data[sn] = dict(items)
# second pass: cast the values into int or bool where possible
# (mostly to avoid storing "false", which evaluates to True)
for sn in self.raw_data.keys():
self.normalized_data[sn] = {}
for key, val in self.raw_data[sn].items():
self.normalized_data[sn][key] = \
self.__normalize_value(val)
# third pass: iterate the normalized data, creating a
# dict (self.data) containing the "real" configuration,
# which may include things (magic, defaults, etc) not
# present in the raw_data or normalized_data
for sn in self.normalized_data.keys():
section_parser = "parse_%s_section" % (sn)
# if this section has a special parser, call
# it with the raw data, and store the result
if hasattr(self, section_parser):
self.data[sn] = \
getattr(self, section_parser)(
self.normalized_data[sn])
# no custom section parser, so
# just copy the raw data as-is
else:
self.data[sn] =\
self.normalized_data[sn].copy()
def __normalize_value (self, value):
"""Casts a string to a bool, int, or float, if it looks like it
should be one. This is a band-aid over the ini format, which
assumes all values to be strings. Examples:
"mudkips" => "mudkips" (str)
"false", "FALSE", "no" => False (bool)
"true", "TRUE", "yes" => True (bool)
"1.0", "0001.00" => 1.0 (float)
"0", "0000" => 0 (int)"""
# shortcut for string boolean values
if value.lower() in ["false", "no"]: return False
elif value.lower() in ["true", "yes"]: return True
# attempt to cast this value to an int, then a float. (a sloppy
# benchmark of this exception-catching algorithm indicates that
# it's faster than checking with a regexp)
for func in [int, float]:
            try: return func(value)
except: pass
# it's just a str
# (NOT A UNICODE)
return value
def __import_class (self, class_tmpl):
"""Given a full class name (ie, webapp.app.App), returns the
class object. There doesn't seem to be a built-in way of doing
this without mucking with __import__."""
# break the class name off the end of module template
# i.e. "ABCD.app.App" -> ("ABC.app", "App")
try:
split_module = class_tmpl.rsplit(".",1)
module = __import__(split_module[0], {}, {}, split_module[1:])
#module = __import__(class_tmpl, {}, {}, [])
# import the requested class or None
if len(split_module) > 1 and hasattr(module, split_module[-1]):
return getattr(module, split_module[-1])
else:
return module
except ImportError, e:
logging.error("App import error: " + str(e))
pass
def component_section (self, name):
# fetch the current config for this section
# from raw_data (or default to an empty dict),
# then copy it, so we don't alter the original
data = self.raw_data.get(name, {}).copy()
# although "name" and "type" are deliberately distinct (to enable multiple
# components of the same type to run concurrently), it's cumbersome to have
# to provide a type every single time, so default to the name
if not "type" in data:
data["type"] = name
return data
def app_section (self, name):
data = self.component_section(name)
data["module"] = data["type"]
# load the config.py for this app, if possible
config = self.__import_class("%s.config" % data["module"])
if config is not None:
# copy all of the names not starting with underscore (those are
# private or __magic__) into this component's default config
for var_name in dir(config):
if not var_name.startswith("_"):
data[var_name] = getattr(config, var_name)
# import the actual module, and add the path to the
# config - it might not always be in rapidsms/apps/%s
module_obj = self.__import_class(data["module"])
if module_obj: data["path"] = module_obj.__path__[0]
# return the component with the additional
# app-specific data included.
return data
def backend_section (self, name):
return self.component_section(name)
def parse_rapidsms_section (self, raw_section):
# "apps" and "backends" are strings of comma-separated
# component names. first, break them into real lists
app_names = to_list(raw_section["apps"])
backend_names = to_list(raw_section["backends"])
# run lists of component names through component_section,
# to transform into lists of dicts containing more meta-info
return { "apps": [self.app_section(n) for n in app_names],
"backends": [self.backend_section(n) for n in backend_names] }
def parse_log_section (self, raw_section):
output = {"level": log.LOG_LEVEL, "file": log.LOG_FILE}
output.update(raw_section)
return output
def parse_i18n_section (self, raw_section):
output = {}
if "default_language" in raw_section:
output.update( {"default_language" : raw_section["default_language"]} )
def _add_language_settings(setting):
if setting not in raw_section: return
output.update( {setting:[]} )
all_language_settings = to_list(raw_section[setting], separator="),(")
for language_settings in all_language_settings:
language = to_list( language_settings.strip('()') )
output[setting].append( language )
_add_language_settings("languages")
_add_language_settings("web_languages")
_add_language_settings("sms_languages")
# add a section for the locale paths
if "locale_paths" in raw_section:
output["locale_paths"] = to_list(raw_section["locale_paths"], ",")
return output
def __getitem__ (self, key):
return self.data[key]
def has_key (self, key):
return self.data.has_key(key)
__contains__ = has_key
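# Usage sketch (the paths are hypothetical); later files override earlier
# ones, following SafeConfigParser.read() semantics:
#
#     conf = Config("/etc/rapidsms.ini", "local.ini")
#     if "log" in conf:
#         level = conf["log"]["level"]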
|
bsd-3-clause
|
keyurpatel076/MissionPlannerGit
|
Lib/imghdr.py
|
259
|
3544
|
"""Recognize image file formats based on their first few bytes."""
__all__ = ["what"]
#-------------------------#
# Recognize image headers #
#-------------------------#
def what(file, h=None):
if h is None:
if isinstance(file, basestring):
f = open(file, 'rb')
h = f.read(32)
else:
location = file.tell()
h = file.read(32)
file.seek(location)
f = None
else:
f = None
try:
for tf in tests:
res = tf(h, f)
if res:
return res
finally:
if f: f.close()
return None
#---------------------------------#
# Subroutines per image file type #
#---------------------------------#
tests = []
def test_jpeg(h, f):
"""JPEG data in JFIF format"""
if h[6:10] == 'JFIF':
return 'jpeg'
tests.append(test_jpeg)
def test_exif(h, f):
"""JPEG data in Exif format"""
if h[6:10] == 'Exif':
return 'jpeg'
tests.append(test_exif)
def test_png(h, f):
if h[:8] == "\211PNG\r\n\032\n":
return 'png'
tests.append(test_png)
def test_gif(h, f):
"""GIF ('87 and '89 variants)"""
if h[:6] in ('GIF87a', 'GIF89a'):
return 'gif'
tests.append(test_gif)
def test_tiff(h, f):
"""TIFF (can be in Motorola or Intel byte order)"""
if h[:2] in ('MM', 'II'):
return 'tiff'
tests.append(test_tiff)
def test_rgb(h, f):
"""SGI image library"""
if h[:2] == '\001\332':
return 'rgb'
tests.append(test_rgb)
def test_pbm(h, f):
"""PBM (portable bitmap)"""
if len(h) >= 3 and \
h[0] == 'P' and h[1] in '14' and h[2] in ' \t\n\r':
return 'pbm'
tests.append(test_pbm)
def test_pgm(h, f):
"""PGM (portable graymap)"""
if len(h) >= 3 and \
h[0] == 'P' and h[1] in '25' and h[2] in ' \t\n\r':
return 'pgm'
tests.append(test_pgm)
def test_ppm(h, f):
"""PPM (portable pixmap)"""
if len(h) >= 3 and \
h[0] == 'P' and h[1] in '36' and h[2] in ' \t\n\r':
return 'ppm'
tests.append(test_ppm)
def test_rast(h, f):
"""Sun raster file"""
if h[:4] == '\x59\xA6\x6A\x95':
return 'rast'
tests.append(test_rast)
def test_xbm(h, f):
"""X bitmap (X10 or X11)"""
s = '#define '
if h[:len(s)] == s:
return 'xbm'
tests.append(test_xbm)
def test_bmp(h, f):
if h[:2] == 'BM':
return 'bmp'
tests.append(test_bmp)
#--------------------#
# Small test program #
#--------------------#
def test():
import sys
recursive = 0
if sys.argv[1:] and sys.argv[1] == '-r':
del sys.argv[1:2]
recursive = 1
try:
if sys.argv[1:]:
testall(sys.argv[1:], recursive, 1)
else:
testall(['.'], recursive, 1)
except KeyboardInterrupt:
sys.stderr.write('\n[Interrupted]\n')
sys.exit(1)
def testall(list, recursive, toplevel):
import sys
import os
for filename in list:
if os.path.isdir(filename):
print filename + '/:',
if recursive or toplevel:
print 'recursing down:'
import glob
names = glob.glob(os.path.join(filename, '*'))
testall(names, recursive, 0)
else:
print '*** directory (use -r) ***'
else:
print filename + ':',
sys.stdout.flush()
try:
print what(filename)
except IOError:
print '*** not found ***'
|
gpl-3.0
|
leighpauls/k2cro4
|
third_party/python_26/Lib/site-packages/win32/Demos/win32netdemo.py
|
18
|
8343
|
import sys
import win32api
import win32net
import win32netcon
import win32security
import getopt
import traceback
verbose_level = 0
server = None # Run on local machine.
def verbose(msg):
if verbose_level:
print msg
def CreateUser():
"Creates a new test user, then deletes the user"
testName = "PyNetTestUser"
try:
win32net.NetUserDel(server, testName)
print "Warning - deleted user before creating it!"
except win32net.error:
pass
d = {}
d['name'] = testName
d['password'] = 'deleteme'
d['priv'] = win32netcon.USER_PRIV_USER
d['comment'] = "Delete me - created by Python test code"
d['flags'] = win32netcon.UF_NORMAL_ACCOUNT | win32netcon.UF_SCRIPT
win32net.NetUserAdd(server, 1, d)
try:
try:
win32net.NetUserChangePassword(server, testName, "wrong", "new")
print "ERROR: NetUserChangePassword worked with a wrong password!"
except win32net.error:
pass
win32net.NetUserChangePassword(server, testName, "deleteme", "new")
finally:
win32net.NetUserDel(server, testName)
print "Created a user, changed their password, and deleted them!"
def UserEnum():
"Enumerates all the local users"
resume = 0
nuser = 0
while 1:
data, total, resume = win32net.NetUserEnum(server, 3, win32netcon.FILTER_NORMAL_ACCOUNT, resume)
verbose("Call to NetUserEnum obtained %d entries of %d total" % (len(data), total))
for user in data:
verbose("Found user %s" % user['name'])
nuser = nuser + 1
if not resume:
break
assert nuser, "Could not find any users!"
print "Enumerated all the local users"
def GroupEnum():
"Enumerates all the domain groups"
nmembers = 0
resume = 0
while 1:
data, total, resume = win32net.NetGroupEnum(server, 1, resume)
# print "Call to NetGroupEnum obtained %d entries of %d total" % (len(data), total)
for group in data:
verbose("Found group %(name)s:%(comment)s " % group)
memberresume = 0
while 1:
memberdata, total, memberresume = win32net.NetGroupGetUsers(server, group['name'], 0, memberresume)
for member in memberdata:
verbose(" Member %(name)s" % member)
nmembers = nmembers + 1
if memberresume==0:
break
if not resume:
break
assert nmembers, "Couldnt find a single member in a single group!"
print "Enumerated all the groups"
def LocalGroupEnum():
"Enumerates all the local groups"
resume = 0
nmembers = 0
while 1:
data, total, resume = win32net.NetLocalGroupEnum(server, 1, resume)
for group in data:
verbose("Found group %(name)s:%(comment)s " % group)
memberresume = 0
while 1:
memberdata, total, memberresume = win32net.NetLocalGroupGetMembers(server, group['name'], 2, memberresume)
for member in memberdata:
# Just for the sake of it, we convert the SID to a username
username, domain, type = win32security.LookupAccountSid(server, member['sid'])
nmembers = nmembers + 1
verbose(" Member %s (%s)" % (username, member['domainandname']))
if memberresume==0:
break
if not resume:
break
assert nmembers, "Couldnt find a single member in a single group!"
print "Enumerated all the local groups"
def ServerEnum():
"Enumerates all servers on the network"
resume = 0
while 1:
data, total, resume = win32net.NetServerEnum(server, 100, win32netcon.SV_TYPE_ALL, None, resume)
for s in data:
verbose("Found server %s" % s['name'])
# Now loop over the shares.
shareresume=0
while 1:
sharedata, total, shareresume = win32net.NetShareEnum(server, 2, shareresume)
for share in sharedata:
verbose(" %(netname)s (%(path)s):%(remark)s - in use by %(current_uses)d users" % share)
if not shareresume:
break
if not resume:
break
print "Enumerated all the servers on the network"
def LocalGroup(uname=None):
"Creates a local group, adds some members, deletes them, then removes the group"
level = 3
if uname is None: uname=win32api.GetUserName()
if uname.find("\\")<0:
uname = win32api.GetDomainName() + "\\" + uname
group = 'python_test_group'
# delete the group if it already exists
try:
win32net.NetLocalGroupDel(server, group)
print "WARNING: existing local group '%s' has been deleted."
except win32net.error:
pass
group_data = {'name': group}
win32net.NetLocalGroupAdd(server, 1, group_data)
try:
u={'domainandname': uname}
win32net.NetLocalGroupAddMembers(server, group, level, [u])
mem, tot, res = win32net.NetLocalGroupGetMembers(server, group, level)
print "members are", mem
if mem[0]['domainandname'] != uname:
print "ERROR: LocalGroup just added %s, but members are %r" % (uname, mem)
# Convert the list of dicts to a list of strings.
win32net.NetLocalGroupDelMembers(server, group, [m['domainandname'] for m in mem])
finally:
win32net.NetLocalGroupDel(server, group)
print "Created a local group, added and removed members, then deleted the group"
def GetInfo(userName=None):
"Dumps level 3 information about the current user"
if userName is None: userName=win32api.GetUserName()
print "Dumping level 3 information about user"
info = win32net.NetUserGetInfo(server, userName, 3)
for key, val in info.items():
verbose("%s=%s" % (key,val))
def SetInfo(userName=None):
"Attempts to change the current users comment, then set it back"
if userName is None: userName=win32api.GetUserName()
oldData = win32net.NetUserGetInfo(server, userName, 3)
try:
d = oldData.copy()
d["usr_comment"] = "Test comment"
win32net.NetUserSetInfo(server, userName, 3, d)
new = win32net.NetUserGetInfo(server, userName, 3)['usr_comment']
if str(new) != "Test comment":
raise RuntimeError, "Could not read the same comment back - got %s" % new
print "Changed the data for the user"
finally:
win32net.NetUserSetInfo(server, userName, 3, oldData)
def SetComputerInfo():
"Doesnt actually change anything, just make sure we could ;-)"
info = win32net.NetWkstaGetInfo(None, 502)
# *sob* - but we can't! Why not!!!
# win32net.NetWkstaSetInfo(None, 502, info)
def usage(tests):
import os
print "Usage: %s [-s server ] [-v] [Test ...]" % os.path.basename(sys.argv[0])
print " -v : Verbose - print more information"
print " -s : server - execute the tests against the named server"
print "where Test is one of:"
for t in tests:
print t.__name__,":", t.__doc__
print
print "If not tests are specified, all tests are run"
sys.exit(1)
def main():
tests = []
for ob in globals().values():
if type(ob)==type(main) and ob.__doc__:
tests.append(ob)
opts, args = getopt.getopt(sys.argv[1:], "s:hv")
for opt, val in opts:
if opt=="-s":
global server
server = val
if opt=="-h":
usage(tests)
if opt=="-v":
global verbose_level
verbose_level = verbose_level + 1
if len(args)==0:
print "Running all tests - use '-h' to see command-line options..."
dotests = tests
else:
dotests = []
for arg in args:
for t in tests:
if t.__name__==arg:
dotests.append(t)
break
else:
print "Test '%s' unknown - skipping" % arg
if not len(dotests):
print "Nothing to do!"
usage(tests)
for test in dotests:
try:
test()
except:
print "Test %s failed" % test.__name__
traceback.print_exc()
if __name__=='__main__':
main()
|
bsd-3-clause
|
mlperf/training_results_v0.5
|
v0.5.0/google/cloud_v3.8/resnet-tpuv3-8/code/resnet/model/models/official/benchmark/benchmark_uploader_main.py
|
6
|
2351
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Binary to upload benchmark generated by BenchmarkLogger to remote repo.
This library requires the google cloud bigquery lib as a dependency, which can be
installed with:
> pip install --upgrade google-cloud-bigquery
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import uuid
from absl import app as absl_app
from absl import flags
from official.benchmark import benchmark_uploader
from official.utils.flags import core as flags_core
from official.utils.logs import logger
def main(_):
if not flags.FLAGS.benchmark_log_dir:
print("Usage: benchmark_uploader.py --benchmark_log_dir=/some/dir")
sys.exit(1)
uploader = benchmark_uploader.BigQueryUploader(
gcp_project=flags.FLAGS.gcp_project)
run_id = str(uuid.uuid4())
run_json_file = os.path.join(
flags.FLAGS.benchmark_log_dir, logger.BENCHMARK_RUN_LOG_FILE_NAME)
metric_json_file = os.path.join(
flags.FLAGS.benchmark_log_dir, logger.METRIC_LOG_FILE_NAME)
uploader.upload_benchmark_run_file(
flags.FLAGS.bigquery_data_set, flags.FLAGS.bigquery_run_table, run_id,
run_json_file)
uploader.upload_metric_file(
flags.FLAGS.bigquery_data_set, flags.FLAGS.bigquery_metric_table, run_id,
metric_json_file)
# Assume the run finished successfully before the user invokes the upload script.
uploader.insert_run_status(
flags.FLAGS.bigquery_data_set, flags.FLAGS.bigquery_run_status_table,
run_id, logger.RUN_STATUS_SUCCESS)
if __name__ == "__main__":
flags_core.define_benchmark()
flags.adopt_module_key_flags(flags_core)
absl_app.run(main=main)
|
apache-2.0
|
massot/odoo
|
addons/hr_recruitment/report/__init__.py
|
442
|
1107
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_recruitment_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
lmregus/mywebsite
|
app/controllers/general.py
|
1
|
2346
|
from server import app
from server import login_manager
from flask import render_template
from flask import request
from flask import redirect
from flask import send_file
from flask import abort
from flask_login import login_user
from flask_login import logout_user
from flask_login import login_required
from models.code_snippet import CodeSnippet
from models.skill import Skill
from models.job import Job
from models.education import Degree
from models.site_user import SiteUser
from models.forms import LoginForm
@login_manager.user_loader
def load_user(user_id):
return SiteUser().get_by_id(user_id)
@app.route('/')
def index():
slug = '/'
skill = Skill()
job = Job()
jobs = job.get_all()
jobs.sort(key=lambda job: job.start_date, reverse=True)
education = Degree()
page_title = 'Home'
return render_template('index.html', slug = slug,
page_title = page_title,
skills = skill.get_all(),
jobs = jobs,
education = education.get_all())
@app.route('/resume')
def resume_page():
page_title = 'Resume'
slug = 'resume'
try:
return send_file('static/pdfs/LuisRegusCV3.pdf', attachment_filename='LuisRegusCV3.pdf')
except Exception as e:
return str(e)
@app.route('/code-snippets')
def code_snippets():
page_title = 'Code Snippets'
code_snippets = CodeSnippet().get_all()
slug = 'code-snippets'
return render_template('pages/code-snippets.html', slug = slug, page_title = page_title, code_snippets = code_snippets)
@app.route('/admin/login', methods=['GET', 'POST'])
@app.route('/admin/', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
data = {
'name': form.username.data,
'password': form.password.data
}
user = SiteUser().get_by_username(data['name'])
if user is not None and user.is_authenticated(data):
login_user(user)
return redirect('/admin/code-snippet')
return render_template('admin/login.html', form=form)
@app.route('/admin/logout', methods=['GET'])
@login_required
def logout():
logout_user()
return redirect('/')
@app.errorhandler(404)
def page_not_found(e):
return render_template('error/404.html'), 404
|
mit
|
caot/intellij-community
|
python/helpers/docutils/transforms/universal.py
|
63
|
6577
|
# $Id: universal.py 6112 2009-09-03 07:27:59Z milde $
# Authors: David Goodger <[email protected]>; Ueli Schlaepfer
# Copyright: This module has been placed in the public domain.
"""
Transforms needed by most or all documents:
- `Decorations`: Generate a document's header & footer.
- `Messages`: Placement of system messages stored in
`nodes.document.transform_messages`.
- `TestMessages`: Like `Messages`, used on test runs.
- `FinalReferences`: Resolve remaining references.
"""
__docformat__ = 'reStructuredText'
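# Usage sketch (assuming an already-parsed `document`): transforms are queued
# on the document's Transformer and executed in ascending `default_priority`
# order, e.g.
#   document.transformer.add_transform(Messages)
#   document.transformer.apply_transforms()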
import re
import sys
import time
from docutils import nodes, utils
from docutils.transforms import TransformError, Transform
class Decorations(Transform):
"""
Populate a document's decoration element (header, footer).
"""
default_priority = 820
def apply(self):
header_nodes = self.generate_header()
if header_nodes:
decoration = self.document.get_decoration()
header = decoration.get_header()
header.extend(header_nodes)
footer_nodes = self.generate_footer()
if footer_nodes:
decoration = self.document.get_decoration()
footer = decoration.get_footer()
footer.extend(footer_nodes)
def generate_header(self):
return None
def generate_footer(self):
# @@@ Text is hard-coded for now.
# Should be made dynamic (language-dependent).
settings = self.document.settings
if settings.generator or settings.datestamp or settings.source_link \
or settings.source_url:
text = []
if settings.source_link and settings._source \
or settings.source_url:
if settings.source_url:
source = settings.source_url
else:
source = utils.relative_path(settings._destination,
settings._source)
text.extend([
nodes.reference('', 'View document source',
refuri=source),
nodes.Text('.\n')])
if settings.datestamp:
datestamp = time.strftime(settings.datestamp, time.gmtime())
text.append(nodes.Text('Generated on: ' + datestamp + '.\n'))
if settings.generator:
text.extend([
nodes.Text('Generated by '),
nodes.reference('', 'Docutils', refuri=
'http://docutils.sourceforge.net/'),
nodes.Text(' from '),
nodes.reference('', 'reStructuredText', refuri='http://'
'docutils.sourceforge.net/rst.html'),
nodes.Text(' source.\n')])
return [nodes.paragraph('', '', *text)]
else:
return None
class ExposeInternals(Transform):
"""
Expose internal attributes if ``expose_internals`` setting is set.
"""
default_priority = 840
def not_Text(self, node):
return not isinstance(node, nodes.Text)
def apply(self):
if self.document.settings.expose_internals:
for node in self.document.traverse(self.not_Text):
for att in self.document.settings.expose_internals:
value = getattr(node, att, None)
if value is not None:
node['internal:' + att] = value
class Messages(Transform):
"""
Place any system messages generated after parsing into a dedicated section
of the document.
"""
default_priority = 860
def apply(self):
unfiltered = self.document.transform_messages
threshold = self.document.reporter.report_level
messages = []
for msg in unfiltered:
if msg['level'] >= threshold and not msg.parent:
messages.append(msg)
if messages:
section = nodes.section(classes=['system-messages'])
# @@@ get this from the language module?
section += nodes.title('', 'Docutils System Messages')
section += messages
self.document.transform_messages[:] = []
self.document += section
class FilterMessages(Transform):
"""
Remove system messages below verbosity threshold.
"""
default_priority = 870
def apply(self):
for node in self.document.traverse(nodes.system_message):
if node['level'] < self.document.reporter.report_level:
node.parent.remove(node)
class TestMessages(Transform):
"""
Append all post-parse system messages to the end of the document.
Used for testing purposes.
"""
default_priority = 880
def apply(self):
for msg in self.document.transform_messages:
if not msg.parent:
self.document += msg
class StripComments(Transform):
"""
Remove comment elements from the document tree (only if the
``strip_comments`` setting is enabled).
"""
default_priority = 740
def apply(self):
if self.document.settings.strip_comments:
for node in self.document.traverse(nodes.comment):
node.parent.remove(node)
class StripClassesAndElements(Transform):
"""
Remove from the document tree all elements with classes in
`self.document.settings.strip_elements_with_classes` and all "classes"
attribute values in `self.document.settings.strip_classes`.
"""
default_priority = 420
def apply(self):
if not (self.document.settings.strip_elements_with_classes
or self.document.settings.strip_classes):
return
# prepare dicts for lookup (not sets, for Python 2.2 compatibility):
self.strip_elements = dict(
[(key, None)
for key in (self.document.settings.strip_elements_with_classes
or [])])
self.strip_classes = dict(
[(key, None) for key in (self.document.settings.strip_classes
or [])])
for node in self.document.traverse(self.check_classes):
node.parent.remove(node)
def check_classes(self, node):
if isinstance(node, nodes.Element):
for class_value in node['classes'][:]:
if class_value in self.strip_classes:
node['classes'].remove(class_value)
if class_value in self.strip_elements:
return 1
|
apache-2.0
|
riveridea/gnuradio
|
gr-channels/python/channels/impairments.py
|
54
|
4755
|
#!/usr/bin/env python
##################################################
# Gnuradio Python Flow Graph
# Title: Radio Impairments Model
# Author: mettus
# Generated: Thu Aug 1 12:46:10 2013
##################################################
from gnuradio import analog
from gnuradio import blocks
from gnuradio import gr
from gnuradio.filter import firdes
import math
#Import locally
from phase_noise_gen import *
from iqbal_gen import *
from distortion_2_gen import *
from distortion_3_gen import *
class impairments(gr.hier_block2):
def __init__(self, phase_noise_mag=0, magbal=0, phasebal=0, q_ofs=0, i_ofs=0, freq_offset=0, gamma=0, beta=0):
gr.hier_block2.__init__(
self, "Radio Impairments Model",
gr.io_signature(1, 1, gr.sizeof_gr_complex*1),
gr.io_signature(1, 1, gr.sizeof_gr_complex*1),
)
##################################################
# Parameters
##################################################
self.phase_noise_mag = phase_noise_mag
self.magbal = magbal
self.phasebal = phasebal
self.q_ofs = q_ofs
self.i_ofs = i_ofs
self.freq_offset = freq_offset
self.gamma = gamma
self.beta = beta
##################################################
# Blocks
##################################################
self.channels_phase_noise_gen_0_0 = phase_noise_gen(math.pow(10.0,phase_noise_mag/20.0), .01)
self.channels_iqbal_gen_0 = iqbal_gen(magbal, phasebal)
self.channels_distortion_3_gen_0 = distortion_3_gen(beta)
self.channels_distortion_2_gen_0 = distortion_2_gen(gamma)
self.blocks_multiply_xx_0_0 = blocks.multiply_vcc(1)
self.blocks_multiply_xx_0 = blocks.multiply_vcc(1)
self.blocks_conjugate_cc_0 = blocks.conjugate_cc()
self.blocks_add_const_vxx_0 = blocks.add_const_vcc((i_ofs + q_ofs* 1j, ))
self.analog_sig_source_x_0 = analog.sig_source_c(1.0, analog.GR_COS_WAVE, freq_offset, 1, 0)
##################################################
# Connections
##################################################
self.connect((self.channels_phase_noise_gen_0_0, 0), (self.channels_distortion_3_gen_0, 0))
self.connect((self.blocks_multiply_xx_0, 0), (self, 0))
self.connect((self.blocks_add_const_vxx_0, 0), (self.blocks_multiply_xx_0, 1))
self.connect((self.analog_sig_source_x_0, 0), (self.blocks_multiply_xx_0, 0))
self.connect((self.blocks_multiply_xx_0_0, 0), (self.channels_phase_noise_gen_0_0, 0))
self.connect((self.analog_sig_source_x_0, 0), (self.blocks_conjugate_cc_0, 0))
self.connect((self, 0), (self.blocks_multiply_xx_0_0, 1))
self.connect((self.blocks_conjugate_cc_0, 0), (self.blocks_multiply_xx_0_0, 0))
self.connect((self.channels_iqbal_gen_0, 0), (self.blocks_add_const_vxx_0, 0))
self.connect((self.channels_distortion_3_gen_0, 0), (self.channels_distortion_2_gen_0, 0))
self.connect((self.channels_distortion_2_gen_0, 0), (self.channels_iqbal_gen_0, 0))
# QT sink close method reimplementation
def get_phase_noise_mag(self):
return self.phase_noise_mag
def set_phase_noise_mag(self, phase_noise_mag):
self.phase_noise_mag = phase_noise_mag
self.channels_phase_noise_gen_0_0.set_noise_mag(math.pow(10.0,self.phase_noise_mag/20.0))
def get_magbal(self):
return self.magbal
def set_magbal(self, magbal):
self.magbal = magbal
self.channels_iqbal_gen_0.set_magnitude(self.magbal)
def get_phasebal(self):
return self.phasebal
def set_phasebal(self, phasebal):
self.phasebal = phasebal
self.channels_iqbal_gen_0.set_phase(self.phasebal)
def get_q_ofs(self):
return self.q_ofs
def set_q_ofs(self, q_ofs):
self.q_ofs = q_ofs
self.blocks_add_const_vxx_0.set_k((self.i_ofs + self.q_ofs* 1j, ))
def get_i_ofs(self):
return self.i_ofs
def set_i_ofs(self, i_ofs):
self.i_ofs = i_ofs
self.blocks_add_const_vxx_0.set_k((self.i_ofs + self.q_ofs* 1j, ))
def get_freq_offset(self):
return self.freq_offset
def set_freq_offset(self, freq_offset):
self.freq_offset = freq_offset
self.analog_sig_source_x_0.set_frequency(self.freq_offset)
def get_gamma(self):
return self.gamma
def set_gamma(self, gamma):
self.gamma = gamma
self.channels_distortion_2_gen_0.set_beta(self.gamma)
def get_beta(self):
return self.beta
def set_beta(self, beta):
self.beta = beta
self.channels_distortion_3_gen_0.set_beta(self.beta)
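# Usage sketch (assumes a gr.top_block `tb` and complex `src`/`snk` blocks;
# parameter values are illustrative):
#   imp = impairments(phase_noise_mag=-60, magbal=0.1, phasebal=2.0,
#                     freq_offset=1e3)
#   tb.connect(src, imp, snk)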
|
gpl-3.0
|
hnoerdli/hussa
|
node_modules/npm/node_modules/node-gyp/gyp/gyptest.py
|
1752
|
8019
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
__doc__ = """
gyptest.py -- test runner for GYP tests.
"""
import os
import optparse
import shlex
import subprocess
import sys
class CommandRunner(object):
"""
Executor class for commands, including "commands" implemented by
Python functions.
"""
verbose = True
active = True
def __init__(self, dictionary={}):
self.subst_dictionary(dictionary)
def subst_dictionary(self, dictionary):
self._subst_dictionary = dictionary
def subst(self, string, dictionary=None):
"""
Substitutes (via the format operator) the values in the specified
dictionary into the specified command.
The command can be an (action, string) tuple. In all cases, we
perform substitution on strings and don't worry if something isn't
a string. (It's probably a Python function to be executed.)
"""
if dictionary is None:
dictionary = self._subst_dictionary
if dictionary:
try:
string = string % dictionary
except TypeError:
pass
return string
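# e.g. self.subst('%(python)s gyptest.py', {'python': sys.executable})
# fills in the interpreter path; a non-string command passes through unchanged.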
def display(self, command, stdout=None, stderr=None):
if not self.verbose:
return
if type(command) == type(()):
func = command[0]
args = command[1:]
s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args)))
elif type(command) == type([]):
# TODO: quote arguments containing spaces
# TODO: handle meta characters?
s = ' '.join(command)
else:
s = self.subst(command)
if not s.endswith('\n'):
s += '\n'
sys.stdout.write(s)
sys.stdout.flush()
def execute(self, command, stdout=None, stderr=None):
"""
Executes a single command.
"""
if not self.active:
return 0
if type(command) == type(''):
command = self.subst(command)
cmdargs = shlex.split(command)
if cmdargs[0] == 'cd':
command = (os.chdir,) + tuple(cmdargs[1:])
if type(command) == type(()):
func = command[0]
args = command[1:]
return func(*args)
else:
if stdout is sys.stdout:
# Same as passing sys.stdout, except python2.4 doesn't fail on it.
subout = None
else:
# Open pipe for anything else so Popen works on python2.4.
subout = subprocess.PIPE
if stderr is sys.stderr:
# Same as passing sys.stderr, except python2.4 doesn't fail on it.
suberr = None
elif stderr is None:
# Merge with stdout if stderr isn't specified.
suberr = subprocess.STDOUT
else:
# Open pipe for anything else so Popen works on python2.4.
suberr = subprocess.PIPE
p = subprocess.Popen(command,
shell=(sys.platform == 'win32'),
stdout=subout,
stderr=suberr)
p.wait()
if stdout is None:
self.stdout = p.stdout.read()
elif stdout is not sys.stdout:
stdout.write(p.stdout.read())
if stderr not in (None, sys.stderr):
stderr.write(p.stderr.read())
return p.returncode
def run(self, command, display=None, stdout=None, stderr=None):
"""
Runs a single command, displaying it first.
"""
if display is None:
display = command
self.display(display)
return self.execute(command, stdout, stderr)
class Unbuffered(object):
def __init__(self, fp):
self.fp = fp
def write(self, arg):
self.fp.write(arg)
self.fp.flush()
def __getattr__(self, attr):
return getattr(self.fp, attr)
sys.stdout = Unbuffered(sys.stdout)
sys.stderr = Unbuffered(sys.stderr)
def is_test_name(f):
return f.startswith('gyptest') and f.endswith('.py')
def find_all_gyptest_files(directory):
result = []
for root, dirs, files in os.walk(directory):
if '.svn' in dirs:
dirs.remove('.svn')
result.extend([ os.path.join(root, f) for f in files if is_test_name(f) ])
result.sort()
return result
def main(argv=None):
if argv is None:
argv = sys.argv
usage = "gyptest.py [-ahlnq] [-f formats] [test ...]"
parser = optparse.OptionParser(usage=usage)
parser.add_option("-a", "--all", action="store_true",
help="run all tests")
parser.add_option("-C", "--chdir", action="store", default=None,
help="chdir to the specified directory")
parser.add_option("-f", "--format", action="store", default='',
help="run tests with the specified formats")
parser.add_option("-G", '--gyp_option', action="append", default=[],
help="Add -G options to the gyp command line")
parser.add_option("-l", "--list", action="store_true",
help="list available tests and exit")
parser.add_option("-n", "--no-exec", action="store_true",
help="no execute, just print the command line")
parser.add_option("--passed", action="store_true",
help="report passed tests")
parser.add_option("--path", action="append", default=[],
help="additional $PATH directory")
parser.add_option("-q", "--quiet", action="store_true",
help="quiet, don't print test command lines")
opts, args = parser.parse_args(argv[1:])
if opts.chdir:
os.chdir(opts.chdir)
if opts.path:
extra_path = [os.path.abspath(p) for p in opts.path]
extra_path = os.pathsep.join(extra_path)
os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH']
if not args:
if not opts.all:
sys.stderr.write('Specify -a to get all tests.\n')
return 1
args = ['test']
tests = []
for arg in args:
if os.path.isdir(arg):
tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
else:
if not is_test_name(os.path.basename(arg)):
print >>sys.stderr, arg, 'is not a valid gyp test name.'
sys.exit(1)
tests.append(arg)
if opts.list:
for test in tests:
print test
sys.exit(0)
CommandRunner.verbose = not opts.quiet
CommandRunner.active = not opts.no_exec
cr = CommandRunner()
os.environ['PYTHONPATH'] = os.path.abspath('test/lib')
if not opts.quiet:
sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH'])
passed = []
failed = []
no_result = []
if opts.format:
format_list = opts.format.split(',')
else:
# TODO: not duplicate this mapping from pylib/gyp/__init__.py
format_list = {
'aix5': ['make'],
'freebsd7': ['make'],
'freebsd8': ['make'],
'openbsd5': ['make'],
'cygwin': ['msvs'],
'win32': ['msvs', 'ninja'],
'linux2': ['make', 'ninja'],
'linux3': ['make', 'ninja'],
'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'],
}[sys.platform]
for format in format_list:
os.environ['TESTGYP_FORMAT'] = format
if not opts.quiet:
sys.stdout.write('TESTGYP_FORMAT=%s\n' % format)
gyp_options = []
for option in opts.gyp_option:
gyp_options += ['-G', option]
if gyp_options and not opts.quiet:
sys.stdout.write('Extra Gyp options: %s\n' % gyp_options)
for test in tests:
status = cr.run([sys.executable, test] + gyp_options,
stdout=sys.stdout,
stderr=sys.stderr)
if status == 2:
no_result.append(test)
elif status:
failed.append(test)
else:
passed.append(test)
if not opts.quiet:
def report(description, tests):
if tests:
if len(tests) == 1:
sys.stdout.write("\n%s the following test:\n" % description)
else:
fmt = "\n%s the following %d tests:\n"
sys.stdout.write(fmt % (description, len(tests)))
sys.stdout.write("\t" + "\n\t".join(tests) + "\n")
if opts.passed:
report("Passed", passed)
report("Failed", failed)
report("No result from", no_result)
if failed:
return 1
else:
return 0
if __name__ == "__main__":
sys.exit(main())
|
mit
|
rmfitzpatrick/ansible
|
lib/ansible/module_utils/fortios.py
|
89
|
8000
|
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Benjamin Jolivot <[email protected]>, 2014
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import os
import time
import traceback
from ansible.module_utils._text import to_native
from ansible.module_utils.basic import env_fallback
# check for pyFG lib
try:
from pyFG import FortiOS, FortiConfig
from pyFG.exceptions import FailedCommit
HAS_PYFG = True
except ImportError:
HAS_PYFG = False
fortios_argument_spec = dict(
file_mode=dict(type='bool', default=False),
config_file=dict(type='path'),
host=dict(),
username=dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
password=dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
timeout=dict(type='int', default=60),
vdom=dict(type='str'),
backup=dict(type='bool', default=False),
backup_path=dict(type='path'),
backup_filename=dict(type='str'),
)
fortios_required_if = [
['file_mode', False, ['host', 'username', 'password']],
['file_mode', True, ['config_file']],
['backup', True, ['backup_path']],
]
fortios_mutually_exclusive = [
['config_file', 'host'],
['config_file', 'username'],
['config_file', 'password']
]
fortios_error_codes = {
'-3': "Object not found",
'-61': "Command error"
}
def backup(module, running_config):
backup_path = module.params['backup_path']
backup_filename = module.params['backup_filename']
if not os.path.exists(backup_path):
try:
os.mkdir(backup_path)
except:
module.fail_json(msg="Can't create directory {0} Permission denied ?".format(backup_path))
tstamp = time.strftime("%Y-%m-%d@%H:%M:%S", time.localtime(time.time()))
if backup_filename:
filename = '%s/%s' % (backup_path, backup_filename)
else:
filename = '%s/%s_config.%s' % (backup_path, module.params['host'], tstamp)
try:
open(filename, 'w').write(running_config)
except:
module.fail_json(msg="Can't create backup file {0} Permission denied ?".format(filename))
class AnsibleFortios(object):
def __init__(self, module):
if not HAS_PYFG:
module.fail_json(msg='Could not import the python library pyFG required by this module')
self.result = {
'changed': False,
}
self.module = module
def _connect(self):
if self.module.params['file_mode']:
self.forti_device = FortiOS('')
else:
host = self.module.params['host']
username = self.module.params['username']
password = self.module.params['password']
timeout = self.module.params['timeout']
vdom = self.module.params['vdom']
self.forti_device = FortiOS(host, username=username, password=password, timeout=timeout, vdom=vdom)
try:
self.forti_device.open()
except Exception as e:
self.module.fail_json(msg='Error connecting device. %s' % to_native(e),
exception=traceback.format_exc())
def load_config(self, path):
self.path = path
self._connect()
# load in file_mode
if self.module.params['file_mode']:
try:
f = open(self.module.params['config_file'], 'r')
running = f.read()
f.close()
except IOError as e:
self.module.fail_json(msg='Error reading configuration file. %s' % to_native(e),
exception=traceback.format_exc())
self.forti_device.load_config(config_text=running, path=path)
else:
# get config
try:
self.forti_device.load_config(path=path)
except Exception as e:
self.forti_device.close()
self.module.fail_json(msg='Error reading running config. %s' % to_native(e),
exception=traceback.format_exc())
# set configs in object
self.result['running_config'] = self.forti_device.running_config.to_text()
self.candidate_config = self.forti_device.candidate_config
# backup if needed
if self.module.params['backup']:
backup(self.module, self.forti_device.running_config.to_text())
def apply_changes(self):
change_string = self.forti_device.compare_config()
if change_string:
self.result['change_string'] = change_string
self.result['changed'] = True
# Commit if not check mode
if change_string and not self.module.check_mode:
if self.module.params['file_mode']:
try:
f = open(self.module.params['config_file'], 'w')
f.write(self.candidate_config.to_text())
f.close()
except IOError as e:
self.module.fail_json(msg='Error writing configuration file. %s' %
to_native(e), exception=traceback.format_exc())
else:
try:
self.forti_device.commit()
except FailedCommit as e:
# Something's wrong (rollback is automatic)
self.forti_device.close()
error_list = self.get_error_infos(e)
self.module.fail_json(msg_error_list=error_list, msg="Unable to commit change, check your args, the error was %s" % e.message)
self.forti_device.close()
self.module.exit_json(**self.result)
def del_block(self, block_id):
self.forti_device.candidate_config[self.path].del_block(block_id)
def add_block(self, block_id, block):
self.forti_device.candidate_config[self.path][block_id] = block
def get_error_infos(self, cli_errors):
error_list = []
for errors in cli_errors.args:
for error in errors:
error_code = error[0]
error_string = error[1]
error_type = fortios_error_codes.get(error_code, "unknown")
error_list.append(dict(error_code=error_code, error_type=error_type, error_string=error_string))
return error_list
def get_empty_configuration_block(self, block_name, block_type):
return FortiConfig(block_name, block_type)
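# Usage sketch inside a module body (path and edits illustrative):
#   module = AnsibleModule(argument_spec=fortios_argument_spec,
#                          required_if=fortios_required_if,
#                          mutually_exclusive=fortios_mutually_exclusive,
#                          supports_check_mode=True)
#   fortigate = AnsibleFortios(module)
#   fortigate.load_config('firewall address')
#   # ...add or delete blocks on the candidate config here...
#   fortigate.apply_changes()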
|
gpl-3.0
|
cctaylor/googleads-python-lib
|
examples/dfp/v201408/line_item_creative_association_service/create_licas.py
|
4
|
1942
|
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates new line item creative associations (LICAs) for an
existing line item and a set of creative ids.
To determine which LICAs exist, run get_all_licas.py."""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
# Import appropriate modules from the client library.
from googleads import dfp
# Set the line item ID and creative IDs to associate.
LINE_ITEM_ID = 'INSERT_LINE_ITEM_ID_HERE'
CREATIVE_IDS = ['INSERT_CREATIVE_IDS_HERE']
def main(client, line_item_id, creative_ids):
# Initialize appropriate service.
lica_service = client.GetService(
'LineItemCreativeAssociationService', version='v201408')
licas = []
for creative_id in creative_ids:
licas.append({'creativeId': creative_id,
'lineItemId': line_item_id})
# Create the LICAs remotely.
licas = lica_service.createLineItemCreativeAssociations(licas)
# Display results.
if licas:
for lica in licas:
print ('LICA with line item id \'%s\', creative id \'%s\', and '
'status \'%s\' was created.' %
(lica['lineItemId'], lica['creativeId'], lica['status']))
else:
print 'No LICAs created.'
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client, LINE_ITEM_ID, CREATIVE_IDS)
|
apache-2.0
|
lisette-espin/JANUS
|
python-code/coauthorship.py
|
1
|
6204
|
from __future__ import division, print_function, absolute_import
__author__ = 'lisette-espin'
################################################################################
### Local Dependencies
################################################################################
from org.gesis.libs import graph as c
from org.gesis.libs.graph import DataMatrix
from org.gesis.libs.janus import JANUS
from org.gesis.libs.hypothesis import Hypothesis
################################################################################
### Global Dependencies
################################################################################
from scipy.sparse import csr_matrix
import numpy as np
import os
import time
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
sns.set_style("whitegrid")
sns.set_style("ticks")
sns.set_context("notebook", font_scale=1.5, rc={"lines.linewidth": 2.5})
sns.set_style({'legend.frameon': True})
################################################################################
### CONSTANTS
################################################################################
ALGORITHM = 'coauthorship'
DEL=','
################################################################################
### Functions
################################################################################
def run_janus(algorithm,isdirected,isweighted,ismultigraph,dependency,output,kmax,klogscale,krank,tocsv=False):
### 1. create data
graph = DataMatrix(isdirected, isweighted, ismultigraph, dependency, algorithm, output)
if graph.exists():
graph.loadData()
else:
graph.extractData(getMatrix(['data'],output))
graph.saveData()
graph.showInfo()
graph.plotAdjacencyMatrix(graph.dataoriginal)
### 2. init JANUS
start = time.time()
janus = JANUS(graph, output)
### 3. create hypotheses
janus.createHypothesis('data')
janus.createHypothesis('uniform')
#janus.createHypothesis('selfloop')
m1 = getMatrix(['same-country'],output)
janus.createHypothesis('B1: same-country',m1)
m2 = getMatrix(['same-gender'],output)
janus.createHypothesis('B2: same-gender',m2)
m3 = getMatrix(['hierarchy'],output)
janus.createHypothesis('B3: hierarchy',m3)
m5 = getMatrix(['popularity-publications'],output)
janus.createHypothesis('B5: popularity-publications',m5)
m6 = getMatrix(['popularity-citations'],output)
janus.createHypothesis('B6: popularity-citations',m6)
m4 = m5.copy() + m6.copy()
janus.createHypothesis('B4: popularity',m4)
m7 = getMatrix(['proximity'],output)
janus.createHypothesis('B7: proximity',m7)
# plots
plot_matrix(m1,output,'B1_same-country.pdf')
plot_matrix(m2,output,'B2_same-gender.pdf')
plot_matrix(m3,output,'B3_hierarchy.pdf')
plot_matrix(m4,output,'B4_popularity.pdf')
plot_matrix(m5,output,'B5_popularity-publications.pdf')
plot_matrix(m6,output,'B6_popularity-citations.pdf')
plot_matrix(m7,output,'B7_proximity.pdf')
# ### 4. evidences
janus.generateEvidences(kmax,klogscale)
stop = time.time()
janus.showRank(krank)
janus.saveEvidencesToFile()
janus.evidences.pop('B7: proximity',None)
janus.evidences.pop('B6: popularity-citations',None)
janus.evidences.pop('B5: popularity-publications',None)
janus.plotEvidences(krank,figsize=(9, 5),bboxx=0.8,bboxy=0.6,fontsize='x-small')
janus.plotBayesFactors(krank,figsize=(9, 5),bboxx=0.8,bboxy=0.5,fontsize='x-small')
janus.saveReadme(start,stop)
# ### 5. Saving CSV (dense matrix)
if tocsv:
save_csv(output,'{}_data.csv'.format(algorithm),graph.dataoriginal)
save_csv(output,'{}_b1_same_country.csv'.format(algorithm),m1)
save_csv(output,'{}_b2_same_gender.csv'.format(algorithm),m2)
save_csv(output,'{}_b3_hierarchy.csv'.format(algorithm),m3)
save_csv(output,'{}_b4_popularity.csv'.format(algorithm),m4)
save_csv(output,'{}_uniform.csv'.format(algorithm),np.zeros((graph.nnodes,graph.nnodes)))
save_csv(output,'{}_selfloop.csv'.format(algorithm),np.diagflat(np.zeros(graph.nnodes)+1))
def save_csv(output, name, matrix):
    fn = os.path.join(output, name)
    # Accept both scipy sparse matrices and dense ndarrays (the np.zeros and
    # np.diagflat calls above pass dense arrays).
    data = matrix.toarray() if hasattr(matrix, 'toarray') else matrix
    np.savetxt(fn, data, delimiter=",", fmt='%.5f')
    print('{} CSV saved!'.format(fn))
def getMatrix(datasets,output):
data = None
for dataset in datasets:
fn = os.path.join(output,'{}.{}'.format(dataset,'matrix'))
if os.path.exists(fn):
if data is None:
data = np.loadtxt(fn,delimiter=DEL)
else:
data = np.add(data,np.loadtxt(fn,delimiter=DEL))
return csr_matrix(data)
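# Illustrative: getMatrix(['same-country'], output) loads the dense CSV
# output/same-country.matrix and returns it as a scipy CSR matrix; passing
# several dataset names sums the matrices element-wise before conversion.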
def plot_matrix(m,path,name):
grid_kws = {"height_ratios": (.9, .05), "hspace": .3}
f, (ax, cbar_ax) = plt.subplots(2, gridspec_kw=grid_kws, figsize=(5,5))
ax = sns.heatmap(m.toarray(), ax=ax,
# annot=True,
cbar_ax=cbar_ax,
cbar_kws={"orientation": "horizontal"})
ax.set_xlabel('target nodes')
ax.set_ylabel('source nodes')
ax.xaxis.tick_top()
ax.yaxis.tick_right()
ax.tick_params(axis='x', colors='grey')
ax.tick_params(axis='y', colors='grey')
plt.setp( ax.xaxis.get_majorticklabels(), horizontalalignment='center' )
plt.setp( ax.yaxis.get_majorticklabels(), rotation=270, horizontalalignment='center', x=1.02 )
cbar_ax.set_title('edge multiplicity')
fn = os.path.join(path,name)
plt.savefig(fn, dpi=1200, bbox_inches='tight')
print('- plot adjacency done!')
plt.close()
################################################################################
### main
################################################################################
if __name__ == '__main__':
isdirected = False
isweighted = False
ismultigraph = True
dependency = c.GLOBAL
kmax = 10
klogscale = False
krank = 10
algorithm = ALGORITHM
output = '../resources/{}-{}'.format(algorithm,dependency)
tocsv = False
if not os.path.exists(output):
os.makedirs(output)
run_janus(algorithm,isdirected,isweighted,ismultigraph,dependency,output,kmax,klogscale,krank,tocsv)
|
mit
|
habibiefaried/ryu
|
ryu/ofproto/ofproto_v1_3.py
|
8
|
49991
|
# Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2012 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
OpenFlow 1.3 definitions.
"""
from ryu.lib import type_desc
from ryu.ofproto import nx_match
from ryu.ofproto import ofproto_utils
from ryu.ofproto import oxm_fields
from struct import calcsize
# struct ofp_header
OFP_HEADER_PACK_STR = '!BBHI'
OFP_HEADER_SIZE = 8
assert calcsize(OFP_HEADER_PACK_STR) == OFP_HEADER_SIZE
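# Sketch: each pack string pairs with struct.pack/unpack. For instance, an
# 8-byte OFPT_HELLO header (version 0x04, xid 0) could be built as:
#   from struct import pack
#   pack(OFP_HEADER_PACK_STR, 0x04, OFPT_HELLO, OFP_HEADER_SIZE, 0)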
# enum ofp_type
OFPT_HELLO = 0 # Symmetric message
OFPT_ERROR = 1 # Symmetric message
OFPT_ECHO_REQUEST = 2 # Symmetric message
OFPT_ECHO_REPLY = 3 # Symmetric message
OFPT_EXPERIMENTER = 4 # Symmetric message
OFPT_FEATURES_REQUEST = 5 # Controller/switch message
OFPT_FEATURES_REPLY = 6 # Controller/switch message
OFPT_GET_CONFIG_REQUEST = 7 # Controller/switch message
OFPT_GET_CONFIG_REPLY = 8 # Controller/switch message
OFPT_SET_CONFIG = 9 # Controller/switch message
OFPT_PACKET_IN = 10 # Async message
OFPT_FLOW_REMOVED = 11 # Async message
OFPT_PORT_STATUS = 12 # Async message
OFPT_PACKET_OUT = 13 # Controller/switch message
OFPT_FLOW_MOD = 14 # Controller/switch message
OFPT_GROUP_MOD = 15 # Controller/switch message
OFPT_PORT_MOD = 16 # Controller/switch message
OFPT_TABLE_MOD = 17 # Controller/switch message
OFPT_MULTIPART_REQUEST = 18 # Controller/switch message
OFPT_MULTIPART_REPLY = 19 # Controller/switch message
OFPT_BARRIER_REQUEST = 20 # Controller/switch message
OFPT_BARRIER_REPLY = 21 # Controller/switch message
OFPT_QUEUE_GET_CONFIG_REQUEST = 22 # Controller/switch message
OFPT_QUEUE_GET_CONFIG_REPLY = 23 # Controller/switch message
OFPT_ROLE_REQUEST = 24 # Controller/switch message
OFPT_ROLE_REPLY = 25 # Controller/switch message
OFPT_GET_ASYNC_REQUEST = 26 # Controller/switch message
OFPT_GET_ASYNC_REPLY = 27 # Controller/switch message
OFPT_SET_ASYNC = 28 # Controller/switch message
OFPT_METER_MOD = 29 # Controller/switch message
# struct ofp_port
OFP_MAX_PORT_NAME_LEN = 16
OFP_ETH_ALEN = 6
OFP_ETH_ALEN_STR = str(OFP_ETH_ALEN)
_OFP_PORT_PACK_STR = 'I4x' + OFP_ETH_ALEN_STR + 's' + '2x' + \
str(OFP_MAX_PORT_NAME_LEN) + 's' + 'IIIIIIII'
OFP_PORT_PACK_STR = '!' + _OFP_PORT_PACK_STR
OFP_PORT_SIZE = 64
assert calcsize(OFP_PORT_PACK_STR) == OFP_PORT_SIZE
# enum ofp_port_config
OFPPC_PORT_DOWN = 1 << 0 # Port is administratively down.
OFPPC_NO_RECV = 1 << 2            # Drop all packets received by port.
OFPPC_NO_FWD = 1 << 5 # Drop packets forwarded to port.
OFPPC_NO_PACKET_IN = 1 << 6 # Do not send packet-in msgs for port.
# enum ofp_port_state
OFPPS_LINK_DOWN = 1 << 0 # No physical link present.
OFPPS_BLOCKED = 1 << 1 # Port is blocked.
OFPPS_LIVE = 1 << 2 # Live for Fast Failover Group.
# enum ofp_port_no
OFPP_MAX = 0xffffff00
OFPP_IN_PORT = 0xfffffff8 # Send the packet out the input port. This
# virtual port must be explicitly used
# in order to send back out of the input
# port.
OFPP_TABLE = 0xfffffff9 # Perform actions in flow table.
# NB: This can only be the destination
# port for packet-out messages.
OFPP_NORMAL = 0xfffffffa # Process with normal L2/L3 switching.
OFPP_FLOOD = 0xfffffffb # All physical ports except input port and
# those disabled by STP.
OFPP_ALL = 0xfffffffc # All physical ports except input port.
OFPP_CONTROLLER = 0xfffffffd # Send to controller.
OFPP_LOCAL = 0xfffffffe # Local openflow "port".
OFPP_ANY = 0xffffffff # Not associated with a physical port.
# All ones is used to indicate all queues in a port (for stats retrieval).
OFPQ_ALL = 0xffffffff
# enum ofp_port_features
OFPPF_10MB_HD = 1 << 0 # 10 Mb half-duplex rate support.
OFPPF_10MB_FD = 1 << 1 # 10 Mb full-duplex rate support.
OFPPF_100MB_HD = 1 << 2 # 100 Mb half-duplex rate support.
OFPPF_100MB_FD = 1 << 3 # 100 Mb full-duplex rate support.
OFPPF_1GB_HD = 1 << 4 # 1 Gb half-duplex rate support.
OFPPF_1GB_FD = 1 << 5 # 1 Gb full-duplex rate support.
OFPPF_10GB_FD = 1 << 6 # 10 Gb full-duplex rate support.
OFPPF_40GB_FD = 1 << 7 # 40 Gb full-duplex rate support.
OFPPF_100GB_FD = 1 << 8 # 100 Gb full-duplex rate support.
OFPPF_1TB_FD = 1 << 9 # 1 Tb full-duplex rate support.
OFPPF_OTHER = 1 << 10 # Other rate, not in the list.
OFPPF_COPPER = 1 << 11 # Copper medium.
OFPPF_FIBER = 1 << 12 # Fiber medium.
OFPPF_AUTONEG = 1 << 13 # Auto-negotiation.
OFPPF_PAUSE = 1 << 14 # Pause.
OFPPF_PAUSE_ASYM = 1 << 15 # Asymmetric pause.
# struct ofp_packet_queue
OFP_PACKET_QUEUE_PACK_STR = '!IIH6x'
OFP_PACKET_QUEUE_SIZE = 16
assert calcsize(OFP_PACKET_QUEUE_PACK_STR) == OFP_PACKET_QUEUE_SIZE
# enum ofp_queue_properties
OFPQT_MIN_RATE = 1 # Minimum datarate guaranteed.
OFPQT_MAX_RATE = 2 # Maximum datarate.
OFPQT_EXPERIMENTER = 0xffff # Experimenter defined property.
# struct ofp_queue_prop_header
OFP_QUEUE_PROP_HEADER_PACK_STR = '!HH4x'
OFP_QUEUE_PROP_HEADER_SIZE = 8
assert calcsize(OFP_QUEUE_PROP_HEADER_PACK_STR) == OFP_QUEUE_PROP_HEADER_SIZE
# struct ofp_queue_prop_min_rate
OFP_QUEUE_PROP_MIN_RATE_PACK_STR = '!H6x'
OFP_QUEUE_PROP_MIN_RATE_SIZE = 16
assert (calcsize(OFP_QUEUE_PROP_MIN_RATE_PACK_STR) +
OFP_QUEUE_PROP_HEADER_SIZE) == OFP_QUEUE_PROP_MIN_RATE_SIZE
# struct ofp_queue_prop_max_rate
OFP_QUEUE_PROP_MAX_RATE_PACK_STR = '!H6x'
OFP_QUEUE_PROP_MAX_RATE_SIZE = 16
assert (calcsize(OFP_QUEUE_PROP_MAX_RATE_PACK_STR) +
OFP_QUEUE_PROP_HEADER_SIZE) == OFP_QUEUE_PROP_MAX_RATE_SIZE
# struct ofp_queue_prop_experimenter
OFP_QUEUE_PROP_EXPERIMENTER_PACK_STR = '!I4x'
OFP_QUEUE_PROP_EXPERIMENTER_SIZE = 16
assert (calcsize(OFP_QUEUE_PROP_EXPERIMENTER_PACK_STR) +
OFP_QUEUE_PROP_HEADER_SIZE) == OFP_QUEUE_PROP_EXPERIMENTER_SIZE
# struct ofp_match
_OFP_MATCH_PACK_STR = 'HHBBBB'
OFP_MATCH_PACK_STR = '!' + _OFP_MATCH_PACK_STR
OFP_MATCH_SIZE = 8
assert calcsize(OFP_MATCH_PACK_STR) == OFP_MATCH_SIZE
# enum ofp_match_type
OFPMT_STANDARD = 0 # Deprecated
OFPMT_OXM = 1 # OpenFlow Extensible Match
# enum ofp_oxm_class
OFPXMC_NXM_0 = 0x0000 # Backward compatibility with NXM
OFPXMC_NXM_1 = 0x0001 # Backward compatibility with NXM
OFPXMC_OPENFLOW_BASIC = 0x8000 # Basic class for OpenFlow
OFPXMC_EXPERIMENTER = 0xFFFF # Experimenter class
# enum ofp_vlan_id
OFPVID_PRESENT = 0x1000 # bit that indicate that a VLAN id is set.
OFPVID_NONE = 0x0000 # No VLAN id was set.
# enum ofp_ipv6exthdr_flags
OFPIEH_NONEXT = 1 << 0 # "No next header" encountered.
OFPIEH_ESP = 1 << 1 # Encrypted Sec Payload header present.
OFPIEH_AUTH = 1 << 2 # Authentication header present.
OFPIEH_DEST = 1 << 3 # 1 or 2 dest headers present.
OFPIEH_FRAG = 1 << 4 # Fragment header present.
OFPIEH_ROUTER = 1 << 5 # Router header present.
OFPIEH_HOP = 1 << 6 # Hop-by-hop header present.
OFPIEH_UNREP = 1 << 7 # Unexpected repeats encountered.
OFPIEH_UNSEQ = 1 << 8 # Unexpected sequencing encountered.
# ofp_oxm_experimenter_header
OFP_OXM_EXPERIMENTER_HEADER_PACK_STR = '!II'
OFP_OXM_EXPERIMENTER_HEADER_SIZE = 8
assert (calcsize(OFP_OXM_EXPERIMENTER_HEADER_PACK_STR) ==
OFP_OXM_EXPERIMENTER_HEADER_SIZE)
# enum ofp_instruction_type
OFPIT_GOTO_TABLE = 1 # Setup the next table in the lookup pipeline.
OFPIT_WRITE_METADATA = 2 # Setup the metadata field for use later in
# pipeline.
OFPIT_WRITE_ACTIONS = 3 # Write the action(s) onto the datapath
# action set
OFPIT_APPLY_ACTIONS = 4 # Applies the action(s) immediately
OFPIT_CLEAR_ACTIONS = 5 # Clears all actions from the datapath action
# set
OFPIT_METER = 6 # Apply meter (rate limiter)
OFPIT_EXPERIMENTER = 0xFFFF # Experimenter instruction
# struct ofp_instruction_goto_table
OFP_INSTRUCTION_GOTO_TABLE_PACK_STR = '!HHB3x'
OFP_INSTRUCTION_GOTO_TABLE_SIZE = 8
assert (calcsize(OFP_INSTRUCTION_GOTO_TABLE_PACK_STR) ==
OFP_INSTRUCTION_GOTO_TABLE_SIZE)
# struct ofp_instruction_write_metadata
OFP_INSTRUCTION_WRITE_METADATA_PACK_STR = '!HH4xQQ'
OFP_INSTRUCTION_WRITE_METADATA_SIZE = 24
assert (calcsize(OFP_INSTRUCTION_WRITE_METADATA_PACK_STR) ==
OFP_INSTRUCTION_WRITE_METADATA_SIZE)
# struct ofp_instruction_actions
OFP_INSTRUCTION_ACTIONS_PACK_STR = '!HH4x'
OFP_INSTRUCTION_ACTIONS_SIZE = 8
assert (calcsize(OFP_INSTRUCTION_ACTIONS_PACK_STR) ==
OFP_INSTRUCTION_ACTIONS_SIZE)
# struct ofp_instruction_meter
OFP_INSTRUCTION_METER_PACK_STR = '!HHI'
OFP_INSTRUCTION_METER_SIZE = 8
assert calcsize(OFP_INSTRUCTION_METER_PACK_STR) == OFP_INSTRUCTION_METER_SIZE
# enum ofp_action_type
OFPAT_OUTPUT = 0 # Output to switch port.
OFPAT_COPY_TTL_OUT = 11 # Copy TTL "outwards" -- from
# next-to-outermost to outermost
OFPAT_COPY_TTL_IN = 12 # Copy TTL "inwards" -- from outermost to
# next-to-outermost
OFPAT_SET_MPLS_TTL = 15 # MPLS TTL.
OFPAT_DEC_MPLS_TTL = 16 # Decrement MPLS TTL
OFPAT_PUSH_VLAN = 17 # Push a new VLAN tag
OFPAT_POP_VLAN = 18 # Pop the outer VLAN tag
OFPAT_PUSH_MPLS = 19 # Push a new MPLS tag
OFPAT_POP_MPLS = 20 # Pop the outer MPLS tag
OFPAT_SET_QUEUE = 21 # Set queue id when outputting to a port
OFPAT_GROUP = 22 # Apply group
OFPAT_SET_NW_TTL = 23 # IP TTL.
OFPAT_DEC_NW_TTL = 24 # Decrement IP TTL.
OFPAT_SET_FIELD = 25 # Set a header field using OXM TLV format.
OFPAT_PUSH_PBB = 26 # Push a new PBB service tag (I-TAG)
OFPAT_POP_PBB = 27 # Pop the outer PBB service tag (I-TAG)
OFPAT_EXPERIMENTER = 0xffff
# struct ofp_action_header
OFP_ACTION_HEADER_PACK_STR = '!HH4x'
OFP_ACTION_HEADER_SIZE = 8
assert calcsize(OFP_ACTION_HEADER_PACK_STR) == OFP_ACTION_HEADER_SIZE
# struct ofp_action_output
OFP_ACTION_OUTPUT_PACK_STR = '!HHIH6x'
OFP_ACTION_OUTPUT_SIZE = 16
assert calcsize(OFP_ACTION_OUTPUT_PACK_STR) == OFP_ACTION_OUTPUT_SIZE
# enum ofp_controller_max_len
OFPCML_MAX = 0xffe5 # maximum max_len value which can be used to
# request a specific byte length.
OFPCML_NO_BUFFER = 0xffff # indicates that no buffering should be
# applied and the whole packet is to be
# sent to the controller.
# struct ofp_action_group
OFP_ACTION_GROUP_PACK_STR = '!HHI'
OFP_ACTION_GROUP_SIZE = 8
assert calcsize(OFP_ACTION_GROUP_PACK_STR) == OFP_ACTION_GROUP_SIZE
# struct ofp_action_set_queue
OFP_ACTION_SET_QUEUE_PACK_STR = '!HHI'
OFP_ACTION_SET_QUEUE_SIZE = 8
assert calcsize(OFP_ACTION_SET_QUEUE_PACK_STR) == OFP_ACTION_SET_QUEUE_SIZE
# struct ofp_action_mpls_ttl
OFP_ACTION_MPLS_TTL_PACK_STR = '!HHB3x'
OFP_ACTION_MPLS_TTL_SIZE = 8
assert calcsize(OFP_ACTION_MPLS_TTL_PACK_STR) == OFP_ACTION_MPLS_TTL_SIZE
# struct ofp_action_nw_ttl
OFP_ACTION_NW_TTL_PACK_STR = '!HHB3x'
OFP_ACTION_NW_TTL_SIZE = 8
assert calcsize(OFP_ACTION_NW_TTL_PACK_STR) == OFP_ACTION_NW_TTL_SIZE
# struct ofp_action_push
OFP_ACTION_PUSH_PACK_STR = '!HHH2x'
OFP_ACTION_PUSH_SIZE = 8
assert calcsize(OFP_ACTION_PUSH_PACK_STR) == OFP_ACTION_PUSH_SIZE
# struct ofp_action_pop_mpls
OFP_ACTION_POP_MPLS_PACK_STR = '!HHH2x'
OFP_ACTION_POP_MPLS_SIZE = 8
assert calcsize(OFP_ACTION_POP_MPLS_PACK_STR) == OFP_ACTION_POP_MPLS_SIZE
# struct ofp_action_set_field
OFP_ACTION_SET_FIELD_PACK_STR = '!HH4x'
OFP_ACTION_SET_FIELD_SIZE = 8
assert calcsize(OFP_ACTION_SET_FIELD_PACK_STR) == OFP_ACTION_SET_FIELD_SIZE
# struct ofp_action_experimenter_header
OFP_ACTION_EXPERIMENTER_HEADER_PACK_STR = '!HHI'
OFP_ACTION_EXPERIMENTER_HEADER_SIZE = 8
assert (calcsize(OFP_ACTION_EXPERIMENTER_HEADER_PACK_STR) ==
OFP_ACTION_EXPERIMENTER_HEADER_SIZE)
# ofp_switch_features
OFP_SWITCH_FEATURES_PACK_STR = '!QIBB2xII'
OFP_SWITCH_FEATURES_SIZE = 32
assert (calcsize(OFP_SWITCH_FEATURES_PACK_STR) + OFP_HEADER_SIZE ==
OFP_SWITCH_FEATURES_SIZE)
# enum ofp_capabilities
OFPC_FLOW_STATS = 1 << 0 # Flow statistics.
OFPC_TABLE_STATS = 1 << 1 # Table statistics.
OFPC_PORT_STATS = 1 << 2 # Port statistics.
OFPC_GROUP_STATS = 1 << 3 # Group statistics.
OFPC_IP_REASM = 1 << 5 # Can reassemble IP fragments.
OFPC_QUEUE_STATS = 1 << 6 # Queue statistics.
OFPC_PORT_BLOCKED = 1 << 8 # Switch will block looping ports.
# struct ofp_switch_config
OFP_SWITCH_CONFIG_PACK_STR = '!HH'
OFP_SWITCH_CONFIG_SIZE = 12
assert (calcsize(OFP_SWITCH_CONFIG_PACK_STR) + OFP_HEADER_SIZE ==
OFP_SWITCH_CONFIG_SIZE)
# enum ofp_config_flags
OFPC_FRAG_NORMAL = 0 # No special handling for fragments.
OFPC_FRAG_DROP = 1 # Drop fragments.
OFPC_FRAG_REASM = 2 # Reassemble (only if OFPC_IP_REASM set).
OFPC_FRAG_MASK = 3
# enum ofp_table
OFPTT_MAX = 0xfe
OFPTT_ALL = 0xff
# struct ofp_table_mod
OFP_TABLE_MOD_PACK_STR = '!B3xI'
OFP_TABLE_MOD_SIZE = 16
assert (calcsize(OFP_TABLE_MOD_PACK_STR) + OFP_HEADER_SIZE ==
OFP_TABLE_MOD_SIZE)
_OFP_FLOW_MOD_PACK_STR0 = 'QQBBHHHIIIH2x'
OFP_FLOW_MOD_PACK_STR = '!' + _OFP_FLOW_MOD_PACK_STR0 + _OFP_MATCH_PACK_STR
OFP_FLOW_MOD_PACK_STR0 = '!' + _OFP_FLOW_MOD_PACK_STR0
OFP_FLOW_MOD_SIZE = 56
assert (calcsize(OFP_FLOW_MOD_PACK_STR) + OFP_HEADER_SIZE ==
OFP_FLOW_MOD_SIZE)
# enum ofp_flow_mod_command
OFPFC_ADD = 0 # New flow.
OFPFC_MODIFY = 1 # Modify all matching flows.
OFPFC_MODIFY_STRICT = 2 # Modify entry strictly matching wildcards
OFPFC_DELETE = 3 # Delete all matching flows.
OFPFC_DELETE_STRICT = 4 # Strictly match wildcards and priority.
# By default, choose a priority in the middle.
OFP_DEFAULT_PRIORITY = 0x8000
# enum ofp_flow_mod_flags
OFPFF_SEND_FLOW_REM = 1 << 0 # Send flow removed message when flow
# expires or is deleted.
OFPFF_CHECK_OVERLAP = 1 << 1 # Check for overlapping entries first.
OFPFF_RESET_COUNTS = 1 << 2 # Reset flow packet and byte counts.
OFPFF_NO_PKT_COUNTS = 1 << 3 # Don't keep track of packet count.
OFPFF_NO_BYT_COUNTS = 1 << 4 # Don't keep track of byte count.
# struct ofp_group_mod
OFP_GROUP_MOD_PACK_STR = '!HBxI'
OFP_GROUP_MOD_SIZE = 16
assert (calcsize(OFP_GROUP_MOD_PACK_STR) + OFP_HEADER_SIZE ==
OFP_GROUP_MOD_SIZE)
# enum ofp_group_mod_command
OFPGC_ADD = 0 # New group.
OFPGC_MODIFY = 1 # Modify all matching groups.
OFPGC_DELETE = 2 # Delete all matching groups.
# enum ofp_group
OFPG_MAX = 0xffffff00 # Last usable group number.
# Fake groups
OFPG_ALL = 0xfffffffc # Represents all groups for group delete commands.
OFPG_ANY = 0xffffffff # Wildcard group used only for flow stats requests.
# Selects all flows regardless of group
# (including flows with no group).
# enum ofp_group_type
OFPGT_ALL = 0 # All (multicast/broadcast) group.
OFPGT_SELECT = 1 # Select group.
OFPGT_INDIRECT = 2 # Indirect group.
OFPGT_FF = 3 # Fast failover group.
# struct ofp_bucket
OFP_BUCKET_PACK_STR = '!HHII4x'
OFP_BUCKET_SIZE = 16
assert calcsize(OFP_BUCKET_PACK_STR) == OFP_BUCKET_SIZE
# struct ofp_port_mod
OFP_PORT_MOD_PACK_STR = '!I4x' + OFP_ETH_ALEN_STR + 's2xIII4x'
OFP_PORT_MOD_SIZE = 40
assert (calcsize(OFP_PORT_MOD_PACK_STR) + OFP_HEADER_SIZE ==
OFP_PORT_MOD_SIZE)
# struct ofp_meter_mod
OFP_METER_MOD_PACK_STR = '!HHI'
OFP_METER_MOD_SIZE = 16
assert (calcsize(OFP_METER_MOD_PACK_STR) + OFP_HEADER_SIZE ==
OFP_METER_MOD_SIZE)
# enum ofp_meter
OFPM_MAX = 0xffff0000
OFPM_SLOWPATH = 0xfffffffd # Meter for slow datapath, if any.
OFPM_CONTROLLER = 0xfffffffe # Meter for controller connection.
OFPM_ALL = 0xffffffff           # Represents all meters for stat request
                                # commands.
# enum ofp_meter_mod_command
OFPMC_ADD = 0 # New meter.
OFPMC_MODIFY = 1 # Modify specified meter.
OFPMC_DELETE = 2 # Delete specified meter.
# enum ofp_meter_flags
OFPMF_KBPS = 1 << 0 # Rate value in kb/s (kilo-bit per second).
OFPMF_PKTPS = 1 << 1 # Rate value in packet/sec.
OFPMF_BURST = 1 << 2 # Do burst size.
OFPMF_STATS = 1 << 3 # Collect statistics.
# struct ofp_meter_band_header
OFP_METER_BAND_HEADER_PACK_STR = '!HHII'
OFP_METER_BAND_HEADER_SIZE = 12
assert (calcsize(OFP_METER_BAND_HEADER_PACK_STR) ==
OFP_METER_BAND_HEADER_SIZE)
# enum ofp_meter_band_type
OFPMBT_DROP = 1 # Drop packet.
OFPMBT_DSCP_REMARK = 2 # Remark DSCP in the IP header.
OFPMBT_EXPERIMENTER = 0xFFFF # Experimenter meter band.
# struct ofp_meter_band_drop
OFP_METER_BAND_DROP_PACK_STR = '!HHII4x'
OFP_METER_BAND_DROP_SIZE = 16
assert (calcsize(OFP_METER_BAND_DROP_PACK_STR) ==
OFP_METER_BAND_DROP_SIZE)
# struct ofp_meter_band_dscp_remark
OFP_METER_BAND_DSCP_REMARK_PACK_STR = '!HHIIB3x'
OFP_METER_BAND_DSCP_REMARK_SIZE = 16
assert (calcsize(OFP_METER_BAND_DSCP_REMARK_PACK_STR) ==
OFP_METER_BAND_DSCP_REMARK_SIZE)
# struct ofp_meter_band_experimenter
OFP_METER_BAND_EXPERIMENTER_PACK_STR = '!HHIII'
OFP_METER_BAND_EXPERIMENTER_SIZE = 16
assert (calcsize(OFP_METER_BAND_EXPERIMENTER_PACK_STR) ==
OFP_METER_BAND_EXPERIMENTER_SIZE)
# struct ofp_multipart_request
OFP_MULTIPART_REQUEST_PACK_STR = '!HH4x'
OFP_MULTIPART_REQUEST_SIZE = 16
assert (calcsize(OFP_MULTIPART_REQUEST_PACK_STR) + OFP_HEADER_SIZE ==
OFP_MULTIPART_REQUEST_SIZE)
# enum ofp_multipart_request_flags
OFPMPF_REQ_MORE = 1 << 0 # More requests to follow.
# struct ofp_multipart_reply
OFP_MULTIPART_REPLY_PACK_STR = '!HH4x'
OFP_MULTIPART_REPLY_SIZE = 16
assert (calcsize(OFP_MULTIPART_REPLY_PACK_STR) + OFP_HEADER_SIZE ==
OFP_MULTIPART_REPLY_SIZE)
# enum ofp_multipart_reply_flags
OFPMPF_REPLY_MORE = 1 << 0 # More replies to follow.
# enum ofp_multipart_types
OFPMP_DESC = 0
OFPMP_FLOW = 1
OFPMP_AGGREGATE = 2
OFPMP_TABLE = 3
OFPMP_PORT_STATS = 4
OFPMP_QUEUE = 5
OFPMP_GROUP = 6
OFPMP_GROUP_DESC = 7
OFPMP_GROUP_FEATURES = 8
OFPMP_METER = 9
OFPMP_METER_CONFIG = 10
OFPMP_METER_FEATURES = 11
OFPMP_TABLE_FEATURES = 12
OFPMP_PORT_DESC = 13
OFPMP_EXPERIMENTER = 0xffff
# struct ofp_desc
DESC_STR_LEN = 256
DESC_STR_LEN_STR = str(DESC_STR_LEN)
SERIAL_NUM_LEN = 32
SERIAL_NUM_LEN_STR = str(SERIAL_NUM_LEN)
OFP_DESC_PACK_STR = '!' + \
DESC_STR_LEN_STR + 's' + \
DESC_STR_LEN_STR + 's' + \
DESC_STR_LEN_STR + 's' + \
SERIAL_NUM_LEN_STR + 's' + \
DESC_STR_LEN_STR + 's'
OFP_DESC_SIZE = 1056
assert calcsize(OFP_DESC_PACK_STR) == OFP_DESC_SIZE
# struct ofp_flow_stats_request
_OFP_FLOW_STATS_REQUEST_0_PACK_STR = 'B3xII4xQQ'
OFP_FLOW_STATS_REQUEST_0_PACK_STR = '!' + _OFP_FLOW_STATS_REQUEST_0_PACK_STR
OFP_FLOW_STATS_REQUEST_0_SIZE = 32
assert (calcsize(OFP_FLOW_STATS_REQUEST_0_PACK_STR) ==
OFP_FLOW_STATS_REQUEST_0_SIZE)
OFP_FLOW_STATS_REQUEST_PACK_STR = (OFP_FLOW_STATS_REQUEST_0_PACK_STR +
_OFP_MATCH_PACK_STR)
OFP_FLOW_STATS_REQUEST_SIZE = 40
assert (calcsize(OFP_FLOW_STATS_REQUEST_PACK_STR) ==
OFP_FLOW_STATS_REQUEST_SIZE)
# struct ofp_flow_stats
_OFP_FLOW_STATS_0_PACK_STR = 'HBxIIHHHH4xQQQ'
OFP_FLOW_STATS_0_PACK_STR = '!' + _OFP_FLOW_STATS_0_PACK_STR
OFP_FLOW_STATS_0_SIZE = 48
assert calcsize(OFP_FLOW_STATS_0_PACK_STR) == OFP_FLOW_STATS_0_SIZE
OFP_FLOW_STATS_PACK_STR = (OFP_FLOW_STATS_0_PACK_STR +
_OFP_MATCH_PACK_STR)
OFP_FLOW_STATS_SIZE = 56
assert calcsize(OFP_FLOW_STATS_PACK_STR) == OFP_FLOW_STATS_SIZE
# struct ofp_flow_stats_request
_OFP_AGGREGATE_STATS_REQUEST_0_PACK_STR = 'B3xII4xQQ'
OFP_AGGREGATE_STATS_REQUEST_0_PACK_STR = '!' + \
_OFP_AGGREGATE_STATS_REQUEST_0_PACK_STR
OFP_AGGREGATE_STATS_REQUEST_0_SIZE = 32
assert (calcsize(OFP_AGGREGATE_STATS_REQUEST_0_PACK_STR) ==
OFP_AGGREGATE_STATS_REQUEST_0_SIZE)
OFP_AGGREGATE_STATS_REQUEST_PACK_STR = \
OFP_AGGREGATE_STATS_REQUEST_0_PACK_STR + _OFP_MATCH_PACK_STR
OFP_AGGREGATE_STATS_REQUEST_SIZE = 40
assert (calcsize(OFP_AGGREGATE_STATS_REQUEST_PACK_STR) ==
OFP_AGGREGATE_STATS_REQUEST_SIZE)
# struct ofp_aggregate_stats_request
OFP_AGGREGATE_STATS_REQUEST_PACK_STR = '!B3xII4xQQ' + _OFP_MATCH_PACK_STR
OFP_AGGREGATE_STATS_REQUEST_SIZE = 40
assert (calcsize(OFP_AGGREGATE_STATS_REQUEST_PACK_STR) ==
OFP_AGGREGATE_STATS_REQUEST_SIZE)
# struct ofp_aggregate_stats_reply
OFP_AGGREGATE_STATS_REPLY_PACK_STR = '!QQI4x'
OFP_AGGREGATE_STATS_REPLY_SIZE = 24
assert (calcsize(OFP_AGGREGATE_STATS_REPLY_PACK_STR) ==
OFP_AGGREGATE_STATS_REPLY_SIZE)
# struct ofp_table_stats
OFP_TABLE_STATS_PACK_STR = '!B3xIQQ'
OFP_TABLE_STATS_SIZE = 24
assert calcsize(OFP_TABLE_STATS_PACK_STR) == OFP_TABLE_STATS_SIZE
# struct ofp_table_features
OFP_MAX_TABLE_NAME_LEN = 32
OFP_MAX_TABLE_NAME_LEN_STR = str(OFP_MAX_TABLE_NAME_LEN)
OFP_TABLE_FEATURES_PACK_STR = '!HB5x' + OFP_MAX_TABLE_NAME_LEN_STR + \
's' + 'QQII'
OFP_TABLE_FEATURES_SIZE = 64
assert (calcsize(OFP_TABLE_FEATURES_PACK_STR) ==
OFP_TABLE_FEATURES_SIZE)
# enum ofp_table_feature_prop_type
OFPTFPT_INSTRUCTIONS = 0
OFPTFPT_INSTRUCTIONS_MISS = 1
OFPTFPT_NEXT_TABLES = 2
OFPTFPT_NEXT_TABLES_MISS = 3
OFPTFPT_WRITE_ACTIONS = 4
OFPTFPT_WRITE_ACTIONS_MISS = 5
OFPTFPT_APPLY_ACTIONS = 6
OFPTFPT_APPLY_ACTIONS_MISS = 7
OFPTFPT_MATCH = 8
OFPTFPT_WILDCARDS = 10
OFPTFPT_WRITE_SETFIELD = 12
OFPTFPT_WRITE_SETFIELD_MISS = 13
OFPTFPT_APPLY_SETFIELD = 14
OFPTFPT_APPLY_SETFIELD_MISS = 15
OFPTFPT_EXPERIMENTER = 0xFFFE
OFPTFPT_EXPERIMENTER_MISS = 0xFFFF
# struct ofp_table_feature_prop_instructions
OFP_TABLE_FEATURE_PROP_INSTRUCTIONS_PACK_STR = '!HH'
OFP_TABLE_FEATURE_PROP_INSTRUCTIONS_SIZE = 4
assert (calcsize(OFP_TABLE_FEATURE_PROP_INSTRUCTIONS_PACK_STR) ==
OFP_TABLE_FEATURE_PROP_INSTRUCTIONS_SIZE)
# struct ofp_table_feature_prop_next_tables
OFP_TABLE_FEATURE_PROP_NEXT_TABLES_PACK_STR = '!HH'
OFP_TABLE_FEATURE_PROP_NEXT_TABLES_SIZE = 4
assert (calcsize(OFP_TABLE_FEATURE_PROP_NEXT_TABLES_PACK_STR) ==
OFP_TABLE_FEATURE_PROP_NEXT_TABLES_SIZE)
# struct ofp_table_feature_prop_actions
OFP_TABLE_FEATURE_PROP_ACTIONS_PACK_STR = '!HH'
OFP_TABLE_FEATURE_PROP_ACTIONS_SIZE = 4
assert (calcsize(OFP_TABLE_FEATURE_PROP_ACTIONS_PACK_STR) ==
OFP_TABLE_FEATURE_PROP_ACTIONS_SIZE)
# struct ofp_table_feature_prop_oxm
OFP_TABLE_FEATURE_PROP_OXM_PACK_STR = '!HH'
OFP_TABLE_FEATURE_PROP_OXM_SIZE = 4
assert (calcsize(OFP_TABLE_FEATURE_PROP_OXM_PACK_STR) ==
OFP_TABLE_FEATURE_PROP_OXM_SIZE)
# struct ofp_port_stats_request
OFP_PORT_STATS_REQUEST_PACK_STR = '!I4x'
OFP_PORT_STATS_REQUEST_SIZE = 8
assert (calcsize(OFP_PORT_STATS_REQUEST_PACK_STR) ==
OFP_PORT_STATS_REQUEST_SIZE)
# struct ofp_port_stats
OFP_PORT_STATS_PACK_STR = '!I4xQQQQQQQQQQQQII'
OFP_PORT_STATS_SIZE = 112
assert calcsize(OFP_PORT_STATS_PACK_STR) == OFP_PORT_STATS_SIZE
# struct ofp_queue_stats_request
OFP_QUEUE_STATS_REQUEST_PACK_STR = '!II'
OFP_QUEUE_STATS_REQUEST_SIZE = 8
assert (calcsize(OFP_QUEUE_STATS_REQUEST_PACK_STR) ==
OFP_QUEUE_STATS_REQUEST_SIZE)
# struct ofp_queue_stats
OFP_QUEUE_STATS_PACK_STR = '!IIQQQII'
OFP_QUEUE_STATS_SIZE = 40
assert calcsize(OFP_QUEUE_STATS_PACK_STR) == OFP_QUEUE_STATS_SIZE
# struct ofp_group_stats_request
OFP_GROUP_STATS_REQUEST_PACK_STR = '!I4x'
OFP_GROUP_STATS_REQUEST_SIZE = 8
assert (calcsize(OFP_GROUP_STATS_REQUEST_PACK_STR) ==
OFP_GROUP_STATS_REQUEST_SIZE)
# struct ofp_group_stats
OFP_GROUP_STATS_PACK_STR = '!H2xII4xQQII'
OFP_GROUP_STATS_SIZE = 40
assert calcsize(OFP_GROUP_STATS_PACK_STR) == OFP_GROUP_STATS_SIZE
# struct ofp_bucket_counter
OFP_BUCKET_COUNTER_PACK_STR = '!QQ'
OFP_BUCKET_COUNTER_SIZE = 16
assert calcsize(OFP_BUCKET_COUNTER_PACK_STR) == OFP_BUCKET_COUNTER_SIZE
# struct ofp_group_desc
OFP_GROUP_DESC_PACK_STR = '!HBxI'
OFP_GROUP_DESC_SIZE = 8
assert calcsize(OFP_GROUP_DESC_PACK_STR) == OFP_GROUP_DESC_SIZE
# struct ofp_group_desc_stats
OFP_GROUP_DESC_STATS_PACK_STR = OFP_GROUP_DESC_PACK_STR
OFP_GROUP_DESC_STATS_SIZE = OFP_GROUP_DESC_SIZE
assert calcsize(OFP_GROUP_DESC_STATS_PACK_STR) == OFP_GROUP_DESC_STATS_SIZE
# struct ofp_group_features
OFP_GROUP_FEATURES_PACK_STR = '!II4I4I'
OFP_GROUP_FEATURES_SIZE = 40
assert calcsize(OFP_GROUP_FEATURES_PACK_STR) == OFP_GROUP_FEATURES_SIZE
# enum ofp_group_capabilities
OFPGFC_SELECT_WEIGHT = 1 << 0 # Support weight for select groups.
OFPGFC_SELECT_LIVENESS = 1 << 1 # Support liveness for select groups.
OFPGFC_CHAINING = 1 << 2 # Support chaining groups.
OFPGFC_CHAINING_CHECKS = 1 << 3 # Check chaining for loops and delete
# struct ofp_meter_multipart_request
OFP_METER_MULTIPART_REQUEST_PACK_STR = '!I4x'
OFP_METER_MULTIPART_REQUEST_SIZE = 8
assert (calcsize(OFP_METER_MULTIPART_REQUEST_PACK_STR) ==
OFP_METER_MULTIPART_REQUEST_SIZE)
# struct ofp_meter_stats
OFP_METER_STATS_PACK_STR = '!IH6xIQQII'
OFP_METER_STATS_SIZE = 40
assert calcsize(OFP_METER_STATS_PACK_STR) == OFP_METER_STATS_SIZE
# struct ofp_meter_band_stats
OFP_METER_BAND_STATS_PACK_STR = '!QQ'
OFP_METER_BAND_STATS_SIZE = 16
assert (calcsize(OFP_METER_BAND_STATS_PACK_STR) ==
OFP_METER_BAND_STATS_SIZE)
# struct ofp_meter_config
OFP_METER_CONFIG_PACK_STR = '!HHI'
OFP_METER_CONFIG_SIZE = 8
assert calcsize(OFP_METER_CONFIG_PACK_STR) == OFP_METER_CONFIG_SIZE
# struct ofp_meter_features
OFP_METER_FEATURES_PACK_STR = '!IIIBB2x'
OFP_METER_FEATURES_SIZE = 16
assert (calcsize(OFP_METER_FEATURES_PACK_STR) ==
OFP_METER_FEATURES_SIZE)
# struct ofp_experimenter_multipart_header
OFP_EXPERIMENTER_MULTIPART_HEADER_PACK_STR = '!II'
OFP_EXPERIMENTER_MULTIPART_HEADER_SIZE = 8
assert (calcsize(OFP_EXPERIMENTER_MULTIPART_HEADER_PACK_STR) ==
OFP_EXPERIMENTER_MULTIPART_HEADER_SIZE)
# struct ofp_queue_get_config_request
OFP_QUEUE_GET_CONFIG_REQUEST_PACK_STR = '!I4x'
OFP_QUEUE_GET_CONFIG_REQUEST_SIZE = 16
assert (calcsize(OFP_QUEUE_GET_CONFIG_REQUEST_PACK_STR) +
OFP_HEADER_SIZE) == OFP_QUEUE_GET_CONFIG_REQUEST_SIZE
# struct ofp_queue_get_config_reply
OFP_QUEUE_GET_CONFIG_REPLY_PACK_STR = '!I4x'
OFP_QUEUE_GET_CONFIG_REPLY_SIZE = 16
assert (calcsize(OFP_QUEUE_GET_CONFIG_REPLY_PACK_STR) +
OFP_HEADER_SIZE) == OFP_QUEUE_GET_CONFIG_REPLY_SIZE
# struct ofp_packet_out
OFP_PACKET_OUT_PACK_STR = '!IIH6x'
OFP_PACKET_OUT_SIZE = 24
assert (calcsize(OFP_PACKET_OUT_PACK_STR) + OFP_HEADER_SIZE ==
OFP_PACKET_OUT_SIZE)
# struct ofp_role_request
OFP_ROLE_REQUEST_PACK_STR = '!I4xQ'
OFP_ROLE_REQUEST_SIZE = 24
assert (calcsize(OFP_ROLE_REQUEST_PACK_STR) + OFP_HEADER_SIZE ==
OFP_ROLE_REQUEST_SIZE)
# enum ofp_controller_role
OFPCR_ROLE_NOCHANGE = 0 # Don't change current role.
OFPCR_ROLE_EQUAL = 1 # Default role, full access.
OFPCR_ROLE_MASTER = 2 # Full access, at most one master.
OFPCR_ROLE_SLAVE = 3 # Read-only access.
# struct ofp_async_config
OFP_ASYNC_CONFIG_PACK_STR = '!2I2I2I'
OFP_ASYNC_CONFIG_SIZE = 32
assert (calcsize(OFP_ASYNC_CONFIG_PACK_STR) + OFP_HEADER_SIZE ==
OFP_ASYNC_CONFIG_SIZE)
# struct ofp_packet_in
OFP_PACKET_IN_PACK_STR = '!IHBBQ'
OFP_PACKET_IN_SIZE = 32
assert (calcsize(OFP_PACKET_IN_PACK_STR) + OFP_MATCH_SIZE + OFP_HEADER_SIZE ==
OFP_PACKET_IN_SIZE)
# enum ofp_packet_in_reason
OFPR_NO_MATCH = 0 # No matching flow.
OFPR_ACTION = 1 # Action explicitly output to controller.
OFPR_INVALID_TTL = 2 # Packet has invalid TTL.
# struct ofp_flow_removed
_OFP_FLOW_REMOVED_PACK_STR0 = 'QHBBIIHHQQ'
OFP_FLOW_REMOVED_PACK_STR = '!' + _OFP_FLOW_REMOVED_PACK_STR0 + \
_OFP_MATCH_PACK_STR
OFP_FLOW_REMOVED_PACK_STR0 = '!' + _OFP_FLOW_REMOVED_PACK_STR0
OFP_FLOW_REMOVED_SIZE = 56
assert (calcsize(OFP_FLOW_REMOVED_PACK_STR) + OFP_HEADER_SIZE ==
OFP_FLOW_REMOVED_SIZE)
# enum ofp_flow_removed_reason
OFPRR_IDLE_TIMEOUT = 0 # Flow idle time exceeded idle_timeout.
OFPRR_HARD_TIMEOUT = 1 # Time exceeded hard_timeout.
OFPRR_DELETE = 2 # Evicted by a DELETE flow mod.
OFPRR_GROUP_DELETE = 3 # Group was removed.
# struct ofp_port_status
OFP_PORT_STATUS_PACK_STR = '!B7x' + _OFP_PORT_PACK_STR
OFP_PORT_STATUS_DESC_OFFSET = OFP_HEADER_SIZE + 8
OFP_PORT_STATUS_SIZE = 80
assert (calcsize(OFP_PORT_STATUS_PACK_STR) + OFP_HEADER_SIZE ==
OFP_PORT_STATUS_SIZE)
# enum ofp_port_reason
OFPPR_ADD = 0 # The port was added.
OFPPR_DELETE = 1 # The port was removed.
OFPPR_MODIFY = 2 # Some attribute of the port has changed.
# OFPMP_EXPERIMENTER
# struct onf_experimenter_multipart_msg
# (experimenter == ONF_EXPERIMENTER_ID)
ONFMP_FLOW_MONITOR = 1870
# EXT-187 seems to have a lot of flaws.
# XXX the spec mentions ONFST_FLOW_MONITOR in some places.
# we assume it's same as ONFMP_FLOW_MONITOR.
# XXX the spec uses OFPP_NONE. we assume it means OFPP_ANY.
# XXX onf_flow_update_full.length is commented to be 24.
# but it needs to tell the actual length of instructions.
# we assume it's variable.
# XXX the spec seems confused between instructions and actions
# for onf_flow_update_full/ONFFMF_ACTIONS. we assume they all
# are instructions.
# XXX the spec does not define payload structures for any of
# ONFT_FLOW_MONITOR_CANCEL, ONFT_FLOW_MONITOR_PAUSED, or
# ONFT_FLOW_MONITOR_RESUMED. we assume they are same as NX.
# according to NX spec (OVS nicira-ext.h and ofp-msg.h):
# NXT_FLOW_MONITOR_CANCEL: a single u32 'id'.
# NXT_FLOW_MONITOR_PAUSED/RESUMED: empty payload
# (OF1.4 uses something different; OFPFMC_DELETE for CANCEL and
# OFPFME_ for PAUSED/RESUMED.)
# XXX onf_flow_monitor_request and onf_flow_update_full use
# match_len + oxm_fields instead of ofp_match. this pointless
# divergence from OF1.4 looks like a botch when updating from OF1.0.
# XXX the spec mentions "the current implementation of Open vSwitch"
# but, as of writing this, it doesn't have this extension implemented
# at all. we assume that it is about OF1.0 NX.
# XXX the spec mentions nx13_flow_monitor_request but I couldn't find
# it in OVS nicira-ext.h.
# onf_flow_monitor_request
# ONFMP_FLOW_MONITOR request's body is zero or more instances of this.
# id, flags, match_len, out_port, table_id, zeros[3]
ONF_FLOW_MONITOR_REQUEST_PACK_STR = '!IHHIB3x'
ONF_FLOW_MONITOR_REQUEST_SIZE = 16
assert (calcsize(ONF_FLOW_MONITOR_REQUEST_PACK_STR) ==
ONF_FLOW_MONITOR_REQUEST_SIZE)
# onf_flow_monitor_request.flags
ONFFMF_INITIAL = 1 << 0
ONFFMF_ADD = 1 << 1
ONFFMF_DELETE = 1 << 2
ONFFMF_MODIFY = 1 << 3
ONFFMF_ACTIONS = 1 << 4
ONFFMF_OWN = 1 << 5
# onf_flow_update_header
# ONFMP_FLOW_MONITOR reply's body is an array of this
# length, event
ONF_FLOW_UPDATE_HEADER_PACK_STR = '!HH'
ONF_FLOW_UPDATE_HEADER_SIZE = 4
assert (calcsize(ONF_FLOW_UPDATE_HEADER_PACK_STR) ==
ONF_FLOW_UPDATE_HEADER_SIZE)
# onf_flow_update_full, excluding onf_flow_update_header
# reason, priority, idle_timeout, hard_timeout, match_len, table_id,
# pad, cookie
ONF_FLOW_UPDATE_FULL_PACK_STR = '!HHHHHBxQ'
ONF_FLOW_UPDATE_FULL_SIZE = 24 - ONF_FLOW_UPDATE_HEADER_SIZE
assert (calcsize(ONF_FLOW_UPDATE_FULL_PACK_STR) ==
ONF_FLOW_UPDATE_FULL_SIZE)
# onf_flow_update_abbrev, excluding onf_flow_update_header
# xid
ONF_FLOW_UPDATE_ABBREV_PACK_STR = '!I'
ONF_FLOW_UPDATE_ABBREV_SIZE = 8 - ONF_FLOW_UPDATE_HEADER_SIZE
assert (calcsize(ONF_FLOW_UPDATE_ABBREV_PACK_STR) ==
ONF_FLOW_UPDATE_ABBREV_SIZE)
# enum onf_flow_update_event
ONFFME_ADDED = 0 # some variations in the spec; ONFMFE_ADD, ONFFME_ADD
ONFFME_DELETED = 1
ONFFME_MODIFIED = 2
ONFFME_ABBREV = 3
# enum onf_flow_monitor_msg_type
ONFT_FLOW_MONITOR_CANCEL = 1870 # controller -> switch
ONFT_FLOW_MONITOR_PAUSED = 1871 # switch -> controller
ONFT_FLOW_MONITOR_RESUMED = 1872 # switch -> controller
# struct ofp_error_msg
OFP_ERROR_MSG_PACK_STR = '!HH'
OFP_ERROR_MSG_SIZE = 12
assert (calcsize(OFP_ERROR_MSG_PACK_STR) + OFP_HEADER_SIZE ==
OFP_ERROR_MSG_SIZE)
# enum ofp_error_type
OFPET_HELLO_FAILED = 0 # Hello protocol failed.
OFPET_BAD_REQUEST = 1 # Request was not understood.
OFPET_BAD_ACTION = 2 # Error in action description.
OFPET_BAD_INSTRUCTION = 3 # Error in instruction list.
OFPET_BAD_MATCH = 4 # Error in match.
OFPET_FLOW_MOD_FAILED = 5 # Problem modifying flow entry.
OFPET_GROUP_MOD_FAILED = 6 # Problem modifying group entry.
OFPET_PORT_MOD_FAILED = 7 # OFPT_PORT_MOD failed.
OFPET_TABLE_MOD_FAILED = 8 # Table mod request failed.
OFPET_QUEUE_OP_FAILED = 9 # Queue operation failed.
OFPET_SWITCH_CONFIG_FAILED = 10 # Switch config request failed.
OFPET_ROLE_REQUEST_FAILED = 11 # Controller Role request failed.
OFPET_METER_MOD_FAILED = 12 # Error in meter.
OFPET_TABLE_FEATURES_FAILED = 13 # Setting table features failed.
OFPET_EXPERIMENTER = 0xffff # Experimenter error messages.
# enum ofp_hello_failed_code
OFPHFC_INCOMPATIBLE = 0 # No compatible version.
OFPHFC_EPERM = 1 # Permissions error.
# enum ofp_bad_request_code
OFPBRC_BAD_VERSION = 0 # ofp_header.version not supported.
OFPBRC_BAD_TYPE = 1 # ofp_header.type not supported.
OFPBRC_BAD_MULTIPART = 2 # ofp_multipart_request.type not
# supported.
OFPBRC_BAD_EXPERIMENTER = 3 # Experimenter id not supported
# (in ofp_experimenter_header
# or ofp_multipart_request or
# ofp_multipart_reply).
OFPBRC_BAD_EXP_TYPE = 4 # Experimenter type not supported.
OFPBRC_EPERM = 5 # Permissions error.
OFPBRC_BAD_LEN = 6 # Wrong request length for type.
OFPBRC_BUFFER_EMPTY = 7 # Specified buffer has already been
# used.
OFPBRC_BUFFER_UNKNOWN = 8 # Specified buffer does not exist.
OFPBRC_BAD_TABLE_ID = 9 # Specified table-id invalid or does
# not exist.
OFPBRC_IS_SLAVE = 10 # Denied because controller is slave.
OFPBRC_BAD_PORT = 11 # Invalid port.
OFPBRC_BAD_PACKET = 12 # Invalid packet in packet-out
OFPBRC_MULTIPART_BUFFER_OVERFLOW = 13 # ofp_multipart_request
# overflowed the assigned buffer.
# enum ofp_bad_action_code
OFPBAC_BAD_TYPE = 0 # Unknown action type.
OFPBAC_BAD_LEN = 1 # Length problem in actions.
OFPBAC_BAD_EXPERIMENTER = 2 # Unknown experimenter id specified.
OFPBAC_BAD_EXP_TYPE = 3 # Unknown action type for experimenter id.
OFPBAC_BAD_OUT_PORT = 4 # Problem validating output action.
OFPBAC_BAD_ARGUMENT = 5 # Bad action argument.
OFPBAC_EPERM = 6 # Permissions error.
OFPBAC_TOO_MANY = 7 # Can't handle this many actions.
OFPBAC_BAD_QUEUE = 8 # Problem validating output queue.
OFPBAC_BAD_OUT_GROUP = 9 # Invalid group id in forward action.
OFPBAC_MATCH_INCONSISTENT = 10 # Action can't apply for this match,
# or Set-Field missing prerequisite.
OFPBAC_UNSUPPORTED_ORDER = 11 # Action order is unsupported for
# the action list in an Apply-Actions
# instruction
OFPBAC_BAD_TAG = 12 # Actions uses an unsupported tag/encap.
OFPBAC_BAD_SET_TYPE = 13 # Unsupported type in SET_FIELD action.
OFPBAC_BAD_SET_LEN = 14 # Length problem in SET_FIELD action.
OFPBAC_BAD_SET_ARGUMENT = 15    # Bad argument in SET_FIELD action.
# enum ofp_bad_instruction_code
OFPBIC_UNKNOWN_INST = 0 # Unknown instruction.
OFPBIC_UNSUP_INST = 1 # Switch or table does not support
# the instruction.
OFPBIC_BAD_TABLE_ID = 2 # Invalid Table-Id specified
OFPBIC_UNSUP_METADATA = 3 # Metadata value unsupported by datapath.
OFPBIC_UNSUP_METADATA_MASK = 4 # Metadata mask value unsupported by
# datapath.
OFPBIC_BAD_EXPERIMENTER = 5 # Unknown experimenter id specified.
OFPBIC_BAD_EXP_TYPE = 6 # Unknown instruction for experimenter id.
OFPBIC_BAD_LEN = 7              # Length problem in instructions.
OFPBIC_EPERM = 8 # Permissions error.
# enum ofp_bad_match_code
OFPBMC_BAD_TYPE = 0             # Unsupported match type specified by
# the match.
OFPBMC_BAD_LEN = 1              # Length problem in match.
OFPBMC_BAD_TAG = 2 # Match uses an unsupported tag/encap.
OFPBMC_BAD_DL_ADDR_MASK = 3 # Unsupported datalink addr mask -
# switch does not support arbitrary
# datalink address mask.
OFPBMC_BAD_NW_ADDR_MASK = 4 # Unsupported network addr mask -
# switch does not support arbitrary
                                # network address mask.
OFPBMC_BAD_WILDCARDS = 5 # Unsupported combination of fields
# masked or omitted in the match.
OFPBMC_BAD_FIELD = 6 # Unsupported field type in the match.
OFPBMC_BAD_VALUE = 7 # Unsupported value in a match field.
OFPBMC_BAD_MASK = 8 # Unsupported mask specified in the
# match.
OFPBMC_BAD_PREREQ = 9 # A prerequisite was not met.
OFPBMC_DUP_FIELD = 10 # A field type was duplicated.
OFPBMC_EPERM = 11 # Permissions error.
# enum ofp_flow_mod_failed_code
OFPFMFC_UNKNOWN = 0 # Unspecified error.
OFPFMFC_TABLE_FULL = 1 # Flow not added because table was full.
OFPFMFC_BAD_TABLE_ID = 2 # Table does not exist
OFPFMFC_OVERLAP = 3 # Attempted to add overlapping flow
# with CHECK_OVERLAP flag set.
OFPFMFC_EPERM = 4 # Permissions error.
OFPFMFC_BAD_TIMEOUT = 5 # Flow not added because of
# unsupported idle/hard timeout.
OFPFMFC_BAD_COMMAND = 6 # Unsupported or unknown command.
OFPFMFC_BAD_FLAGS = 7 # Unsupported or unknown flags.
# enum ofp_group_mod_failed_code
OFPGMFC_GROUP_EXISTS = 0
OFPGMFC_INVALID_GROUP = 1
OFPGMFC_WEIGHT_UNSUPPORTED = 2 # Switch does not support unequal load
# sharing with select groups.
OFPGMFC_OUT_OF_GROUPS = 3 # The group table is full.
OFPGMFC_OUT_OF_BUCKETS = 4 # The maximum number of action buckets
# for a group has been exceeded.
OFPGMFC_CHAINING_UNSUPPORTED = 5 # Switch does not support groups that
# forward to groups.
OFPGMFC_WATCH_UNSUPPORTED = 6 # This group cannot watch the
# watch_port or watch_group specified.
OFPGMFC_LOOP = 7 # Group entry would cause a loop.
OFPGMFC_UNKNOWN_GROUP = 8 # Group not modified because a group
# MODIFY attempted to modify a
# non-existent group.
OFPGMFC_CHAINED_GROUP = 9 # Group not deleted because another
# group is forwarding to it.
OFPGMFC_BAD_TYPE = 10 # Unsupported or unknown group type.
OFPGMFC_BAD_COMMAND = 11 # Unsupported or unknown command.
OFPGMFC_BAD_BUCKET = 12 # Error in bucket.
OFPGMFC_BAD_WATCH = 13 # Error in watch port/group.
OFPGMFC_EPERM = 14 # Permissions error.
# enum ofp_port_mod_failed_code
OFPPMFC_BAD_PORT = 0 # Specified port does not exist.
OFPPMFC_BAD_HW_ADDR = 1 # Specified hardware address does not
# match the port number.
OFPPMFC_BAD_CONFIG = 2 # Specified config is invalid.
OFPPMFC_BAD_ADVERTISE = 3 # Specified advertise is invalid.
OFPPMFC_EPERM = 4 # Permissions error.
# enum ofp_table_mod_failed_code
OFPTMFC_BAD_TABLE = 0 # Specified table does not exist.
OFPTMFC_BAD_CONFIG = 1 # Specified config is invalid.
OFPTMFC_EPERM = 2 # Permissions error
# enum ofp_queue_op_failed_code
OFPQOFC_BAD_PORT = 0 # Invalid port (or port does not exist).
OFPQOFC_BAD_QUEUE = 1 # Queue does not exist.
OFPQOFC_EPERM = 2 # Permissions error.
# enum ofp_switch_config_failed_code
OFPSCFC_BAD_FLAGS = 0 # Specified flags is invalid.
OFPSCFC_BAD_LEN = 1 # Specified len is invalid.
OFPQCFC_EPERM = 2               # Permissions error (deprecated).
                                # New or updated Ryu applications shall use
                                # OFPSCFC_EPERM. The variable name reflects a
                                # typo in specifications before v1.3.1 (EXT-208).
OFPSCFC_EPERM = 2 # Permissions error.
# enum ofp_role_request_failed_code
OFPRRFC_STALE = 0 # Stale Message: old generation_id.
OFPRRFC_UNSUP = 1 # Controller role change unsupported.
OFPRRFC_BAD_ROLE = 2 # Invalid role.
# enum ofp_meter_mod_failed_code
OFPMMFC_UNKNOWN = 0 # Unspecified error.
OFPMMFC_METER_EXISTS = 1 # Meter not added because a Meter ADD
# attempted to replace an existing Meter.
OFPMMFC_INVALID_METER = 2 # Meter not added because Meter specified
# is invalid.
OFPMMFC_UNKNOWN_METER = 3 # Meter not modified because a Meter
# MODIFY attempted to modify a non-existent
# Meter.
OFPMMFC_BAD_COMMAND = 4 # Unsupported or unknown command.
OFPMMFC_BAD_FLAGS = 5 # Flag configuration unsupported.
OFPMMFC_BAD_RATE = 6 # Rate unsupported.
OFPMMFC_BAD_BURST = 7 # Burst size unsupported.
OFPMMFC_BAD_BAND = 8 # Band unsupported.
OFPMMFC_BAD_BAND_VALUE = 9 # Band value unsupported.
OFPMMFC_OUT_OF_METERS = 10      # No more meters available.
OFPMMFC_OUT_OF_BANDS = 11 # The maximum number of properties
# for a meter has been exceeded.
# enum ofp_table_features_failed_code
OFPTFFC_BAD_TABLE = 0 # Specified table does not exist.
OFPTFFC_BAD_METADATA = 1 # Invalid metadata mask.
OFPTFFC_BAD_TYPE = 2 # Unknown property type.
OFPTFFC_BAD_LEN = 3 # Length problem in properties.
OFPTFFC_BAD_ARGUMENT = 4 # Unsupported property value.
OFPTFFC_EPERM = 5 # Permissions error.
# struct ofp_error_experimenter_msg
OFP_ERROR_EXPERIMENTER_MSG_PACK_STR = '!HHI'
OFP_ERROR_EXPERIMENTER_MSG_SIZE = 16
assert (calcsize(OFP_ERROR_EXPERIMENTER_MSG_PACK_STR) +
OFP_HEADER_SIZE) == OFP_ERROR_EXPERIMENTER_MSG_SIZE
# struct ofp_experimenter_header
OFP_EXPERIMENTER_HEADER_PACK_STR = '!II'
OFP_EXPERIMENTER_HEADER_SIZE = 16
assert (calcsize(OFP_EXPERIMENTER_HEADER_PACK_STR) + OFP_HEADER_SIZE
== OFP_EXPERIMENTER_HEADER_SIZE)
# exp_type values for OFPET_EXPERIMENTER (experimenter=ONF_EXPERIMENTER_ID)
ONFERR_ET_UNKNOWN = 2300
ONFERR_ET_EPERM = 2301
ONFERR_ET_BAD_ID = 2302
ONFERR_ET_BUNDLE_EXIST = 2303
ONFERR_ET_BUNDLE_CLOSED = 2304
ONFERR_ET_OUT_OF_BUNDLES = 2305
ONFERR_ET_BAD_TYPE = 2306
ONFERR_ET_BAD_FLAGS = 2307
ONFERR_ET_MSG_BAD_LEN = 2308
ONFERR_ET_MSG_BAD_XID = 2309
ONFERR_ET_MSG_UNSUP = 2310
ONFERR_ET_MSG_CONFLICT = 2311
ONFERR_ET_MSG_TOO_MANY = 2312
ONFERR_ET_FAILED = 2313
ONFERR_ET_TIMEOUT = 2314
ONFERR_ET_BUNDLE_IN_PROGRESS = 2315
ONFERR_ET_CANT_SYNC = 2320
ONFERR_ET_BAD_PRIORITY = 2360
ONFERR_ET_ASYNC_INVALUD = 2370
ONFERR_ET_ASYNC_UNSUPPORTED = 2371
ONFERR_ET_ASYNC_EPERM = 2372
ONFERR_DUP_INSTRUCTION = 2600 # the lack of _ET_ is per spec
ONFERR_ET_MPART_REQUEST_TIMEOUT = 2640
ONFERR_ET_MPART_REPLY_TIMEOUT = 2641
# struct ofp_hello
OFP_HELLO_HEADER_SIZE = 8
# struct ofp_hello_elem_header
OFP_HELLO_ELEM_HEADER_PACK_STR = '!HH'
OFP_HELLO_ELEM_HEADER_SIZE = 4
assert (calcsize(OFP_HELLO_ELEM_HEADER_PACK_STR) == OFP_HELLO_ELEM_HEADER_SIZE)
# enum ofp_hello_elem_type
OFPHET_VERSIONBITMAP = 1
# struct ofp_hello_elem_versionbitmap
OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_PACK_STR = '!HH'
OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_SIZE = 4
assert (calcsize(OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_PACK_STR) ==
OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_SIZE)
# OXM
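# An OXM TLV header packs oxm_class (16 bits), oxm_field (7 bits),
# oxm_hasmask (1 bit) and oxm_length (8 bits) into a single 32-bit word,
# as the helpers below implement.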
def _oxm_tlv_header(class_, field, hasmask, length):
return (class_ << 16) | (field << 9) | (hasmask << 8) | length
def oxm_tlv_header(field, length):
return _oxm_tlv_header(OFPXMC_OPENFLOW_BASIC, field, 0, length)
def oxm_tlv_header_w(field, length):
return _oxm_tlv_header(OFPXMC_OPENFLOW_BASIC, field, 1, length * 2)
def oxm_tlv_header_extract_hasmask(header):
return (header >> 8) & 1
def oxm_tlv_header_extract_length(header):
if oxm_tlv_header_extract_hasmask(header):
length = (header & 0xff) // 2
else:
length = header & 0xff
return length
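# Illustrative self-checks (example values, in the style of the calcsize
# asserts above): a masked field doubles the payload length and sets the
# hasmask bit, and the extract helpers recover both properties.
assert oxm_tlv_header_extract_hasmask(oxm_tlv_header_w(0, 4)) == 1
assert oxm_tlv_header_extract_length(oxm_tlv_header_w(0, 4)) == 4
assert oxm_tlv_header_extract_length(oxm_tlv_header(0, 4)) == 4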
oxm_types = [
oxm_fields.OpenFlowBasic('in_port', 0, type_desc.Int4),
oxm_fields.OpenFlowBasic('in_phy_port', 1, type_desc.Int4),
oxm_fields.OpenFlowBasic('metadata', 2, type_desc.Int8),
oxm_fields.OpenFlowBasic('eth_dst', 3, type_desc.MacAddr),
oxm_fields.OpenFlowBasic('eth_src', 4, type_desc.MacAddr),
oxm_fields.OpenFlowBasic('eth_type', 5, type_desc.Int2),
oxm_fields.OpenFlowBasic('vlan_vid', 6, type_desc.Int2),
oxm_fields.OpenFlowBasic('vlan_pcp', 7, type_desc.Int1),
oxm_fields.OpenFlowBasic('ip_dscp', 8, type_desc.Int1),
oxm_fields.OpenFlowBasic('ip_ecn', 9, type_desc.Int1),
oxm_fields.OpenFlowBasic('ip_proto', 10, type_desc.Int1),
oxm_fields.OpenFlowBasic('ipv4_src', 11, type_desc.IPv4Addr),
oxm_fields.OpenFlowBasic('ipv4_dst', 12, type_desc.IPv4Addr),
oxm_fields.OpenFlowBasic('tcp_src', 13, type_desc.Int2),
oxm_fields.OpenFlowBasic('tcp_dst', 14, type_desc.Int2),
oxm_fields.OpenFlowBasic('udp_src', 15, type_desc.Int2),
oxm_fields.OpenFlowBasic('udp_dst', 16, type_desc.Int2),
oxm_fields.OpenFlowBasic('sctp_src', 17, type_desc.Int2),
oxm_fields.OpenFlowBasic('sctp_dst', 18, type_desc.Int2),
oxm_fields.OpenFlowBasic('icmpv4_type', 19, type_desc.Int1),
oxm_fields.OpenFlowBasic('icmpv4_code', 20, type_desc.Int1),
oxm_fields.OpenFlowBasic('arp_op', 21, type_desc.Int2),
oxm_fields.OpenFlowBasic('arp_spa', 22, type_desc.IPv4Addr),
oxm_fields.OpenFlowBasic('arp_tpa', 23, type_desc.IPv4Addr),
oxm_fields.OpenFlowBasic('arp_sha', 24, type_desc.MacAddr),
oxm_fields.OpenFlowBasic('arp_tha', 25, type_desc.MacAddr),
oxm_fields.OpenFlowBasic('ipv6_src', 26, type_desc.IPv6Addr),
oxm_fields.OpenFlowBasic('ipv6_dst', 27, type_desc.IPv6Addr),
oxm_fields.OpenFlowBasic('ipv6_flabel', 28, type_desc.Int4),
oxm_fields.OpenFlowBasic('icmpv6_type', 29, type_desc.Int1),
oxm_fields.OpenFlowBasic('icmpv6_code', 30, type_desc.Int1),
oxm_fields.OpenFlowBasic('ipv6_nd_target', 31, type_desc.IPv6Addr),
oxm_fields.OpenFlowBasic('ipv6_nd_sll', 32, type_desc.MacAddr),
oxm_fields.OpenFlowBasic('ipv6_nd_tll', 33, type_desc.MacAddr),
oxm_fields.OpenFlowBasic('mpls_label', 34, type_desc.Int4),
oxm_fields.OpenFlowBasic('mpls_tc', 35, type_desc.Int1),
oxm_fields.OpenFlowBasic('mpls_bos', 36, type_desc.Int1),
oxm_fields.OpenFlowBasic('pbb_isid', 37, type_desc.Int3),
oxm_fields.OpenFlowBasic('tunnel_id', 38, type_desc.Int8),
oxm_fields.OpenFlowBasic('ipv6_exthdr', 39, type_desc.Int2),
# EXT-256 Old version of ONF Extension
oxm_fields.OldONFExperimenter('pbb_uca', 2560, type_desc.Int1),
# EXT-109 TCP flags match field Extension
oxm_fields.ONFExperimenter('tcp_flags', 42, type_desc.Int2),
# EXT-233 Output match Extension
# NOTE(yamamoto): The spec says uint64_t but I assume it's an error.
oxm_fields.ONFExperimenter('actset_output', 43, type_desc.Int4),
] + nx_match.oxm_types
oxm_fields.generate(__name__)
# Note: struct ofp_prop_experimenter is specific to this implementation.
# It does not have a corresponding structure in the specification.
# This structure defines the common layout for ofp_*_prop_experimenter.
# struct ofp_prop_experimenter
OFP_PROP_EXPERIMENTER_PACK_STR = '!HHII'
OFP_PROP_EXPERIMENTER_SIZE = 12
assert (calcsize(OFP_PROP_EXPERIMENTER_PACK_STR) ==
OFP_PROP_EXPERIMENTER_SIZE)
# generate utility methods
ofproto_utils.generate(__name__)
# define constants
OFP_VERSION = 0x04
OFP_TCP_PORT = 6633
MAX_XID = 0xffffffff
OFP_NO_BUFFER = 0xffffffff
|
apache-2.0
|
danmergens/mi-instrument
|
mi/dataset/parser/wc_hmr_cspp.py
|
5
|
7883
|
#!/usr/bin/env python
"""
@package mi.dataset.parser
@file marine-integrations/mi/dataset/parser/wc_hmr_cspp.py
@author Jeff Roy
@brief wc_hmr Parser for the cspp_eng_cspp dataset driver
Release notes: This is one of 4 parsers that make up that driver
initial release
"""
__author__ = 'Jeff Roy'
__license__ = 'Apache 2.0'
import numpy
from mi.core.log import get_logger
log = get_logger()
from mi.core.common import BaseEnum
from mi.core.exceptions import RecoverableSampleException
from mi.core.instrument.dataset_data_particle import DataParticle
from mi.dataset.parser.cspp_base import \
CsppParser, \
Y_OR_N_REGEX, \
CsppMetadataDataParticle, \
MetadataRawDataKey, \
encode_y_or_n
from mi.dataset.parser.common_regexes import MULTIPLE_TAB_REGEX, \
FLOAT_REGEX, \
END_OF_LINE_REGEX
# Input Records are formatted as follows
# FORMAT DATA Type Field Units Notes
#
# string float64 Profiler Timestamp seconds Seconds since 1/1/70 with millisecond resolution
# string float32 Depth decibars
# string string Suspect Timestamp 1 "y" or "n"
# string float32 Heading deg Heading in degrees
# string float32 Pitch deg Pitch in degrees
# string float32 Roll deg Roll in degrees
DATA_REGEX = '(' + FLOAT_REGEX + ')' + MULTIPLE_TAB_REGEX # Profiler Timestamp
DATA_REGEX += '(' + FLOAT_REGEX + ')' + MULTIPLE_TAB_REGEX # Depth
DATA_REGEX += '(' + Y_OR_N_REGEX + ')' + MULTIPLE_TAB_REGEX # Suspect Timestamp
DATA_REGEX += '(' + FLOAT_REGEX + ')' + MULTIPLE_TAB_REGEX # Heading
DATA_REGEX += '(' + FLOAT_REGEX + ')' + MULTIPLE_TAB_REGEX # Pitch
DATA_REGEX += '(' + FLOAT_REGEX + ')' + END_OF_LINE_REGEX # Roll
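# For illustration, a hypothetical record that DATA_REGEX is intended to
# match would be six tab-separated fields (values invented here):
# 1397767079.579\t0.24\tn\t272.6\t1.32\t-0.04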
class WcHmrDataTypeKey(BaseEnum):
WC_HMR_CSPP_TELEMETERED = 'wc_hmr_cspp_telemetered'
WC_HMR_CSPP_RECOVERED = 'wc_hmr_cspp_recovered'
class DataMatchesGroupNumber(BaseEnum):
"""
An enum for group match indices for a data record chunk.
Used to access the match groups in the particle raw data
"""
PROFILER_TIMESTAMP = 1
PRESSURE = 2
SUSPECT_TIMESTAMP = 3
HEADING = 4
PITCH = 5
ROLL = 6
class WcHmrDataParticleType(BaseEnum):
ENGINEERING_TELEMETERED = 'cspp_eng_cspp_wc_hmr_eng'
ENGINEERING_RECOVERED = 'cspp_eng_cspp_wc_hmr_eng_recovered'
METADATA_TELEMETERED = 'cspp_eng_cspp_wc_hmr_metadata'
METADATA_RECOVERED = 'cspp_eng_cspp_wc_hmr_metadata_recovered'
class WcHmrEngDataParticleKey(BaseEnum):
"""
The data particle keys associated with wc_hmr engineering data particle parameters
"""
PROFILER_TIMESTAMP = 'profiler_timestamp'
PRESSURE = 'pressure_depth'
SUSPECT_TIMESTAMP = 'suspect_timestamp'
HEADING = 'heading'
PITCH = 'pitch'
ROLL = 'roll'
# A group of instrument data particle encoding rules used to simplify encoding using a loop
ENGINEERING_PARTICLE_ENCODING_RULES = [
(WcHmrEngDataParticleKey.PROFILER_TIMESTAMP, DataMatchesGroupNumber.PROFILER_TIMESTAMP, numpy.float),
(WcHmrEngDataParticleKey.PRESSURE, DataMatchesGroupNumber.PRESSURE, float),
(WcHmrEngDataParticleKey.SUSPECT_TIMESTAMP, DataMatchesGroupNumber.SUSPECT_TIMESTAMP, encode_y_or_n),
(WcHmrEngDataParticleKey.HEADING, DataMatchesGroupNumber.HEADING, float),
(WcHmrEngDataParticleKey.PITCH, DataMatchesGroupNumber.PITCH, float),
(WcHmrEngDataParticleKey.ROLL, DataMatchesGroupNumber.ROLL, float),
]
class WcHmrMetadataDataParticle(CsppMetadataDataParticle):
"""
Class for building a wc hmr metadata particle
"""
def _build_parsed_values(self):
"""
Take something in the data format and turn it into
an array of dictionaries defining the data in the particle
with the appropriate tag.
@throws RecoverableSampleException If there is a problem with sample creation
"""
results = []
try:
# Append the base metadata parsed values to the results to return
results += self._build_metadata_parsed_values()
data_match = self.raw_data[MetadataRawDataKey.DATA_MATCH]
# Set the internal timestamp
internal_timestamp_unix = numpy.float(data_match.group(
DataMatchesGroupNumber.PROFILER_TIMESTAMP))
self.set_internal_timestamp(unix_time=internal_timestamp_unix)
except (ValueError, TypeError, IndexError) as ex:
log.warn("Exception when building parsed values")
raise RecoverableSampleException("Error (%s) while decoding parameters in data: [%s]"
% (ex, self.raw_data))
return results
class WcHmrMetadataRecoveredDataParticle(WcHmrMetadataDataParticle):
"""
Class for building a wc hmr recovered metadata particle
"""
_data_particle_type = WcHmrDataParticleType.METADATA_RECOVERED
class WcHmrMetadataTelemeteredDataParticle(WcHmrMetadataDataParticle):
"""
Class for building a wc hmr telemetered metadata particle
"""
_data_particle_type = WcHmrDataParticleType.METADATA_TELEMETERED
class WcHmrEngDataParticle(DataParticle):
"""
Class for parsing data from the wc hmr engineering data set
"""
def _build_parsed_values(self):
"""
Take something in the data format and turn it into
an array of dictionaries defining the data in the particle
with the appropriate tag.
@throws RecoverableSampleException If there is a problem with sample creation
"""
results = []
try:
# Process each of the instrument particle parameters
for name, group, function in ENGINEERING_PARTICLE_ENCODING_RULES:
results.append(self._encode_value(name, self.raw_data.group(group), function))
            # Set the internal timestamp
            internal_timestamp_unix = numpy.float(self.raw_data.group(
                DataMatchesGroupNumber.PROFILER_TIMESTAMP))
self.set_internal_timestamp(unix_time=internal_timestamp_unix)
# We shouldn't end up with an exception due to the strongly specified regex, but we
# will ensure we catch any potential errors just in case
except (ValueError, TypeError, IndexError) as ex:
log.warn("Exception when building parsed values")
raise RecoverableSampleException("Error (%s) while decoding parameters in data: [%s]"
% (ex, self.raw_data))
return results
class WcHmrEngRecoveredDataParticle(WcHmrEngDataParticle):
"""
Class for building a wc hmr recovered engineering data particle
"""
_data_particle_type = WcHmrDataParticleType.ENGINEERING_RECOVERED
class WcHmrEngTelemeteredDataParticle(WcHmrEngDataParticle):
"""
Class for building a wc hmr telemetered engineering data particle
"""
_data_particle_type = WcHmrDataParticleType.ENGINEERING_TELEMETERED
class WcHmrCsppParser(CsppParser):
def __init__(self,
config,
stream_handle,
exception_callback):
"""
This method is a constructor that will instantiate an WcHmrCsppParser object.
@param config The configuration for this WcHmrCsppParser parser
@param stream_handle The handle to the data stream containing the cspp_eng_cspp data
@param exception_callback The function to call to report exceptions
"""
# Call the superclass constructor
super(WcHmrCsppParser, self).__init__(config,
stream_handle,
exception_callback,
DATA_REGEX,
ignore_matcher=None)
|
bsd-2-clause
|
minejo/shadowsocks
|
shadowsocks/daemon.py
|
386
|
5602
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import logging
import signal
import time
from shadowsocks import common, shell
# this module is ported from ShadowVPN daemon.c
def daemon_exec(config):
if 'daemon' in config:
if os.name != 'posix':
raise Exception('daemon mode is only supported on Unix')
command = config['daemon']
if not command:
command = 'start'
pid_file = config['pid-file']
log_file = config['log-file']
if command == 'start':
daemon_start(pid_file, log_file)
elif command == 'stop':
daemon_stop(pid_file)
# always exit after daemon_stop
sys.exit(0)
elif command == 'restart':
daemon_stop(pid_file)
daemon_start(pid_file, log_file)
else:
raise Exception('unsupported daemon command %s' % command)
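# A minimal usage sketch (hypothetical paths, for illustration only):
# daemon_exec({'daemon': 'start',
#              'pid-file': '/var/run/shadowsocks.pid',
#              'log-file': '/var/log/shadowsocks.log'})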
def write_pid_file(pid_file, pid):
import fcntl
import stat
try:
fd = os.open(pid_file, os.O_RDWR | os.O_CREAT,
stat.S_IRUSR | stat.S_IWUSR)
except OSError as e:
shell.print_exception(e)
return -1
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
assert flags != -1
flags |= fcntl.FD_CLOEXEC
r = fcntl.fcntl(fd, fcntl.F_SETFD, flags)
assert r != -1
# There is no platform independent way to implement fcntl(fd, F_SETLK, &fl)
# via fcntl.fcntl. So use lockf instead
try:
fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB, 0, 0, os.SEEK_SET)
except IOError:
r = os.read(fd, 32)
if r:
logging.error('already started at pid %s' % common.to_str(r))
else:
logging.error('already started')
os.close(fd)
return -1
os.ftruncate(fd, 0)
os.write(fd, common.to_bytes(str(pid)))
return 0
def freopen(f, mode, stream):
oldf = open(f, mode)
oldfd = oldf.fileno()
newfd = stream.fileno()
os.close(newfd)
os.dup2(oldfd, newfd)
def daemon_start(pid_file, log_file):
def handle_exit(signum, _):
if signum == signal.SIGTERM:
sys.exit(0)
sys.exit(1)
signal.signal(signal.SIGINT, handle_exit)
signal.signal(signal.SIGTERM, handle_exit)
# fork only once because we are sure parent will exit
pid = os.fork()
assert pid != -1
if pid > 0:
# parent waits for its child
time.sleep(5)
sys.exit(0)
# child signals its parent to exit
ppid = os.getppid()
pid = os.getpid()
if write_pid_file(pid_file, pid) != 0:
os.kill(ppid, signal.SIGINT)
sys.exit(1)
os.setsid()
    signal.signal(signal.SIGHUP, signal.SIG_IGN)
print('started')
os.kill(ppid, signal.SIGTERM)
sys.stdin.close()
try:
freopen(log_file, 'a', sys.stdout)
freopen(log_file, 'a', sys.stderr)
except IOError as e:
shell.print_exception(e)
sys.exit(1)
def daemon_stop(pid_file):
import errno
try:
with open(pid_file) as f:
buf = f.read()
pid = common.to_str(buf)
            if not buf:
                logging.error('not running')
                return
except IOError as e:
shell.print_exception(e)
if e.errno == errno.ENOENT:
# always exit 0 if we are sure daemon is not running
logging.error('not running')
return
sys.exit(1)
pid = int(pid)
if pid > 0:
try:
os.kill(pid, signal.SIGTERM)
except OSError as e:
if e.errno == errno.ESRCH:
logging.error('not running')
# always exit 0 if we are sure daemon is not running
return
shell.print_exception(e)
sys.exit(1)
else:
logging.error('pid is not positive: %d', pid)
# sleep for maximum 10s
for i in range(0, 200):
try:
# query for the pid
os.kill(pid, 0)
except OSError as e:
if e.errno == errno.ESRCH:
break
time.sleep(0.05)
else:
logging.error('timed out when stopping pid %d', pid)
sys.exit(1)
print('stopped')
os.unlink(pid_file)
def set_user(username):
if username is None:
return
import pwd
import grp
try:
pwrec = pwd.getpwnam(username)
except KeyError:
logging.error('user not found: %s' % username)
raise
user = pwrec[0]
uid = pwrec[2]
gid = pwrec[3]
cur_uid = os.getuid()
if uid == cur_uid:
return
if cur_uid != 0:
logging.error('can not set user as nonroot user')
# will raise later
# inspired by supervisor
if hasattr(os, 'setgroups'):
groups = [grprec[2] for grprec in grp.getgrall() if user in grprec[3]]
groups.insert(0, gid)
os.setgroups(groups)
os.setgid(gid)
os.setuid(uid)
|
apache-2.0
|
postlund/home-assistant
|
homeassistant/components/tellduslive/sensor.py
|
7
|
4738
|
"""Support for Tellstick Net/Telstick Live sensors."""
import logging
from homeassistant.components import sensor, tellduslive
from homeassistant.const import (
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_TEMPERATURE,
POWER_WATT,
TEMP_CELSIUS,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .entry import TelldusLiveEntity
_LOGGER = logging.getLogger(__name__)
SENSOR_TYPE_TEMPERATURE = "temp"
SENSOR_TYPE_HUMIDITY = "humidity"
SENSOR_TYPE_RAINRATE = "rrate"
SENSOR_TYPE_RAINTOTAL = "rtot"
SENSOR_TYPE_WINDDIRECTION = "wdir"
SENSOR_TYPE_WINDAVERAGE = "wavg"
SENSOR_TYPE_WINDGUST = "wgust"
SENSOR_TYPE_UV = "uv"
SENSOR_TYPE_WATT = "watt"
SENSOR_TYPE_LUMINANCE = "lum"
SENSOR_TYPE_DEW_POINT = "dewp"
SENSOR_TYPE_BAROMETRIC_PRESSURE = "barpress"
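# Each SENSOR_TYPES value is a list of
# [name, unit of measurement, icon, device class].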
SENSOR_TYPES = {
SENSOR_TYPE_TEMPERATURE: [
"Temperature",
TEMP_CELSIUS,
None,
DEVICE_CLASS_TEMPERATURE,
],
SENSOR_TYPE_HUMIDITY: ["Humidity", "%", None, DEVICE_CLASS_HUMIDITY],
SENSOR_TYPE_RAINRATE: ["Rain rate", "mm/h", "mdi:water", None],
SENSOR_TYPE_RAINTOTAL: ["Rain total", "mm", "mdi:water", None],
SENSOR_TYPE_WINDDIRECTION: ["Wind direction", "", "", None],
SENSOR_TYPE_WINDAVERAGE: ["Wind average", "m/s", "", None],
SENSOR_TYPE_WINDGUST: ["Wind gust", "m/s", "", None],
SENSOR_TYPE_UV: ["UV", "UV", "", None],
SENSOR_TYPE_WATT: ["Power", POWER_WATT, "", None],
SENSOR_TYPE_LUMINANCE: ["Luminance", "lx", None, DEVICE_CLASS_ILLUMINANCE],
SENSOR_TYPE_DEW_POINT: ["Dew Point", TEMP_CELSIUS, None, DEVICE_CLASS_TEMPERATURE],
SENSOR_TYPE_BAROMETRIC_PRESSURE: ["Barometric Pressure", "kPa", "", None],
}
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Old way of setting up TelldusLive.
Can only be called when a user accidentally mentions the platform in their
config. But even in that case it would have been ignored.
"""
pass
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up tellduslive sensors dynamically."""
async def async_discover_sensor(device_id):
"""Discover and add a discovered sensor."""
client = hass.data[tellduslive.DOMAIN]
async_add_entities([TelldusLiveSensor(client, device_id)])
async_dispatcher_connect(
hass,
tellduslive.TELLDUS_DISCOVERY_NEW.format(sensor.DOMAIN, tellduslive.DOMAIN),
async_discover_sensor,
)
class TelldusLiveSensor(TelldusLiveEntity):
"""Representation of a Telldus Live sensor."""
@property
def device_id(self):
"""Return id of the device."""
return self._id[0]
@property
def _type(self):
"""Return the type of the sensor."""
return self._id[1]
@property
def _value(self):
"""Return value of the sensor."""
return self.device.value(*self._id[1:])
@property
def _value_as_temperature(self):
"""Return the value as temperature."""
return round(float(self._value), 1)
@property
def _value_as_luminance(self):
"""Return the value as luminance."""
return round(float(self._value), 1)
@property
def _value_as_humidity(self):
"""Return the value as humidity."""
return int(round(float(self._value)))
@property
def name(self):
"""Return the name of the sensor."""
return "{} {}".format(super().name, self.quantity_name or "").strip()
@property
def state(self):
"""Return the state of the sensor."""
if not self.available:
return None
if self._type == SENSOR_TYPE_TEMPERATURE:
return self._value_as_temperature
if self._type == SENSOR_TYPE_HUMIDITY:
return self._value_as_humidity
if self._type == SENSOR_TYPE_LUMINANCE:
return self._value_as_luminance
return self._value
@property
def quantity_name(self):
"""Name of quantity."""
return SENSOR_TYPES[self._type][0] if self._type in SENSOR_TYPES else None
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return SENSOR_TYPES[self._type][1] if self._type in SENSOR_TYPES else None
@property
def icon(self):
"""Return the icon."""
return SENSOR_TYPES[self._type][2] if self._type in SENSOR_TYPES else None
@property
def device_class(self):
"""Return the device class."""
return SENSOR_TYPES[self._type][3] if self._type in SENSOR_TYPES else None
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return "{}-{}-{}".format(*self._id)
|
apache-2.0
|
jayceyxc/hue
|
desktop/core/ext-py/Mako-0.8.1/test/test_filters.py
|
36
|
9579
|
# -*- coding: utf-8 -*-
from mako.template import Template
import unittest
from test import TemplateTest, eq_, requires_python_2
from test.util import result_lines, flatten_result
from mako.compat import u
class FilterTest(TemplateTest):
def test_basic(self):
t = Template("""
${x | myfilter}
""")
assert flatten_result(t.render(x="this is x", myfilter=lambda t: "MYFILTER->%s<-MYFILTER" % t)) == "MYFILTER->this is x<-MYFILTER"
def test_expr(self):
"""test filters that are themselves expressions"""
t = Template("""
${x | myfilter(y)}
""")
def myfilter(y):
return lambda x: "MYFILTER->%s<-%s" % (x, y)
assert flatten_result(t.render(x="this is x", myfilter=myfilter, y="this is y")) == "MYFILTER->this is x<-this is y"
def test_convert_str(self):
"""test that string conversion happens in expressions before sending to filters"""
t = Template("""
${x | trim}
""")
assert flatten_result(t.render(x=5)) == "5"
def test_quoting(self):
t = Template("""
foo ${bar | h}
""")
        eq_(
            flatten_result(t.render(bar="<'some bar'>")),
            "foo &lt;'some bar'&gt;"
        )
def test_entity(self):
t = Template("foo ${bar | entity}")
eq_(
flatten_result(t.render(bar="<'some bar'>")),
"foo <'some bar'>"
)
@requires_python_2
def test_quoting_non_unicode(self):
t = Template("""
foo ${bar | h}
""", disable_unicode=True,
output_encoding=None)
        eq_(
            flatten_result(t.render(bar="<'привет'>")),
            "foo &lt;'привет'&gt;"
        )
def test_def(self):
t = Template("""
<%def name="foo()" filter="myfilter">
this is foo
</%def>
${foo()}
""")
eq_(
flatten_result(t.render(x="this is x",
myfilter=lambda t: "MYFILTER->%s<-MYFILTER" % t)),
"MYFILTER-> this is foo <-MYFILTER"
)
def test_import(self):
t = Template("""
<%!
from mako import filters
%>\
trim this string: ${" some string to trim " | filters.trim} continue\
""")
assert t.render().strip()=="trim this string: some string to trim continue"
def test_import_2(self):
t = Template("""
trim this string: ${" some string to trim " | filters.trim} continue\
""", imports=["from mako import filters"])
#print t.code
assert t.render().strip()=="trim this string: some string to trim continue"
def test_encode_filter(self):
t = Template("""# coding: utf-8
some stuff.... ${x}
""", default_filters=['decode.utf8'])
#print t.code
eq_(
t.render_unicode(x=u("voix m’a réveillé")).strip(),
u("some stuff.... voix m’a réveillé")
)
def test_custom_default(self):
t = Template("""
<%!
def myfilter(x):
return "->" + x + "<-"
%>
hi ${'there'}
""", default_filters=['myfilter'])
assert t.render().strip()=="hi ->there<-"
def test_global(self):
t = Template("""
<%page expression_filter="h"/>
${"<tag>this is html</tag>"}
""")
        assert t.render().strip() == "&lt;tag&gt;this is html&lt;/tag&gt;"
def test_block_via_context(self):
t = Template("""
<%block name="foo" filter="myfilter">
some text
</%block>
""")
def myfilter(text):
return "MYTEXT" + text
eq_(
result_lines(t.render(myfilter=myfilter)),
["MYTEXT", "some text"]
)
def test_def_via_context(self):
t = Template("""
<%def name="foo()" filter="myfilter">
some text
</%def>
${foo()}
""")
def myfilter(text):
return "MYTEXT" + text
eq_(
result_lines(t.render(myfilter=myfilter)),
["MYTEXT", "some text"]
)
def test_text_via_context(self):
t = Template("""
<%text filter="myfilter">
some text
</%text>
""")
def myfilter(text):
return "MYTEXT" + text
eq_(
result_lines(t.render(myfilter=myfilter)),
["MYTEXT", "some text"]
)
def test_nflag(self):
t = Template("""
${"<tag>this is html</tag>" | n}
""", default_filters=['h', 'unicode'])
assert t.render().strip() == "<tag>this is html</tag>"
t = Template("""
<%page expression_filter="h"/>
${"<tag>this is html</tag>" | n}
""")
assert t.render().strip() == "<tag>this is html</tag>"
t = Template("""
<%page expression_filter="h"/>
${"<tag>this is html</tag>" | n, h}
""")
        assert t.render().strip() == "&lt;tag&gt;this is html&lt;/tag&gt;"
def test_non_expression(self):
t = Template("""
<%!
def a(text):
return "this is a"
def b(text):
return "this is b"
%>
${foo()}
<%def name="foo()" buffered="True">
this is text
</%def>
""", buffer_filters=['a'])
assert t.render().strip() == "this is a"
t = Template("""
<%!
def a(text):
return "this is a"
def b(text):
return "this is b"
%>
${'hi'}
${foo()}
<%def name="foo()" buffered="True">
this is text
</%def>
""", buffer_filters=['a'], default_filters=['b'])
assert flatten_result(t.render()) == "this is b this is b"
t = Template("""
<%!
class Foo(object):
foo = True
def __str__(self):
return "this is a"
def a(text):
return Foo()
def b(text):
if hasattr(text, 'foo'):
return str(text)
else:
return "this is b"
%>
${'hi'}
${foo()}
<%def name="foo()" buffered="True">
this is text
</%def>
""", buffer_filters=['a'], default_filters=['b'])
assert flatten_result(t.render()) == "this is b this is a"
t = Template("""
<%!
def a(text):
return "this is a"
def b(text):
return "this is b"
%>
${foo()}
${bar()}
<%def name="foo()" filter="b">
this is text
</%def>
<%def name="bar()" filter="b" buffered="True">
this is text
</%def>
""", buffer_filters=['a'])
assert flatten_result(t.render()) == "this is b this is a"
def test_builtins(self):
t = Template("""
${"this is <text>" | h}
""")
        assert flatten_result(t.render()) == "this is &lt;text&gt;"
t = Template("""
http://foo.com/arg1=${"hi! this is a string." | u}
""")
assert flatten_result(t.render()) == "http://foo.com/arg1=hi%21+this+is+a+string."
class BufferTest(unittest.TestCase):
def test_buffered_def(self):
t = Template("""
<%def name="foo()" buffered="True">
this is foo
</%def>
${"hi->" + foo() + "<-hi"}
""")
assert flatten_result(t.render()) == "hi-> this is foo <-hi"
def test_unbuffered_def(self):
t = Template("""
<%def name="foo()" buffered="False">
this is foo
</%def>
${"hi->" + foo() + "<-hi"}
""")
assert flatten_result(t.render()) == "this is foo hi-><-hi"
def test_capture(self):
t = Template("""
<%def name="foo()" buffered="False">
this is foo
</%def>
${"hi->" + capture(foo) + "<-hi"}
""")
assert flatten_result(t.render()) == "hi-> this is foo <-hi"
def test_capture_exception(self):
template = Template("""
<%def name="a()">
this is a
<%
raise TypeError("hi")
%>
</%def>
<%
c = capture(a)
%>
a->${c}<-a
""")
try:
template.render()
assert False
except TypeError:
assert True
def test_buffered_exception(self):
template = Template("""
<%def name="a()" buffered="True">
<%
raise TypeError("hi")
%>
</%def>
${a()}
""")
try:
print(template.render())
assert False
except TypeError:
assert True
def test_capture_ccall(self):
t = Template("""
<%def name="foo()">
<%
x = capture(caller.body)
%>
this is foo. body: ${x}
</%def>
<%call expr="foo()">
ccall body
</%call>
""")
#print t.render()
assert flatten_result(t.render()) == "this is foo. body: ccall body"
|
apache-2.0
|
russomi/appengine-pipeline-read-only
|
src/pipeline/simplejson/encoder.py
|
8
|
18261
|
#!/usr/bin/python2.5
"""Implementation of JSONEncoder
"""
import re
from decimal import Decimal
def _import_speedups():
try:
from simplejson import _speedups
return _speedups.encode_basestring_ascii, _speedups.make_encoder
except ImportError:
return None, None
c_encode_basestring_ascii, c_make_encoder = _import_speedups()
# Local changes for relative import.
from decoder import PosInf
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
ESCAPE_DCT = {
'\\': '\\\\',
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
}
for i in range(0x20):
#ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
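# e.g. chr(0x1f) maps to the escape '\u001f' unless already in ESCAPE_DCT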
FLOAT_REPR = repr
def encode_basestring(s):
"""Return a JSON representation of a Python string
"""
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
s = s.decode('utf-8')
def replace(match):
return ESCAPE_DCT[match.group(0)]
return u'"' + ESCAPE.sub(replace, s) + u'"'
def py_encode_basestring_ascii(s):
"""Return an ASCII-only JSON representation of a Python string
"""
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
s = s.decode('utf-8')
def replace(match):
s = match.group(0)
try:
return ESCAPE_DCT[s]
except KeyError:
n = ord(s)
if n < 0x10000:
#return '\\u{0:04x}'.format(n)
return '\\u%04x' % (n,)
else:
# surrogate pair
n -= 0x10000
s1 = 0xd800 | ((n >> 10) & 0x3ff)
s2 = 0xdc00 | (n & 0x3ff)
#return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
return '\\u%04x\\u%04x' % (s1, s2)
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
encode_basestring_ascii = (
c_encode_basestring_ascii or py_encode_basestring_ascii)
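# Illustrative example: a non-BMP character such as u'\U0001f600' is emitted
# by the ASCII-only encoder as the surrogate pair \ud83d\ude00.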
class JSONEncoder(object):
"""Extensible JSON <http://json.org> encoder for Python data structures.
Supports the following objects and types by default:
+-------------------+---------------+
| Python | JSON |
+===================+===============+
| dict | object |
+-------------------+---------------+
| list, tuple | array |
+-------------------+---------------+
| str, unicode | string |
+-------------------+---------------+
| int, long, float | number |
+-------------------+---------------+
| True | true |
+-------------------+---------------+
| False | false |
+-------------------+---------------+
| None | null |
+-------------------+---------------+
To extend this to recognize other objects, subclass and implement a
``.default()`` method with another method that returns a serializable
object for ``o`` if possible, otherwise it should call the superclass
implementation (to raise ``TypeError``).
"""
item_separator = ', '
key_separator = ': '
def __init__(self, skipkeys=False, ensure_ascii=True,
check_circular=True, allow_nan=True, sort_keys=False,
indent=None, separators=None, encoding='utf-8', default=None,
use_decimal=False):
"""Constructor for JSONEncoder, with sensible defaults.
If skipkeys is false, then it is a TypeError to attempt
encoding of keys that are not str, int, long, float or None. If
skipkeys is True, such items are simply skipped.
If ensure_ascii is true, the output is guaranteed to be str
objects with all incoming unicode characters escaped. If
        ensure_ascii is false, the output will be a unicode object.
If check_circular is true, then lists, dicts, and custom encoded
objects will be checked for circular references during encoding to
prevent an infinite recursion (which would cause an OverflowError).
Otherwise, no such check takes place.
If allow_nan is true, then NaN, Infinity, and -Infinity will be
encoded as such. This behavior is not JSON specification compliant,
but is consistent with most JavaScript based encoders and decoders.
Otherwise, it will be a ValueError to encode such floats.
If sort_keys is true, then the output of dictionaries will be
sorted by key; this is useful for regression tests to ensure
that JSON serializations can be compared on a day-to-day basis.
If indent is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If specified, separators should be a (item_separator, key_separator)
tuple. The default is (', ', ': '). To get the most compact JSON
representation you should specify (',', ':') to eliminate whitespace.
If specified, default is a function that gets called for objects
that can't otherwise be serialized. It should return a JSON encodable
version of the object or raise a ``TypeError``.
If encoding is not None, then all input strings will be
transformed into unicode using that encoding prior to JSON-encoding.
The default is UTF-8.
If use_decimal is true (not the default), ``decimal.Decimal`` will
be supported directly by the encoder. For the inverse, decode JSON
with ``parse_float=decimal.Decimal``.
"""
self.skipkeys = skipkeys
self.ensure_ascii = ensure_ascii
self.check_circular = check_circular
self.allow_nan = allow_nan
self.sort_keys = sort_keys
self.use_decimal = use_decimal
if isinstance(indent, (int, long)):
indent = ' ' * indent
self.indent = indent
if separators is not None:
self.item_separator, self.key_separator = separators
if default is not None:
self.default = default
self.encoding = encoding
def default(self, o):
"""Implement this method in a subclass such that it returns
a serializable object for ``o``, or calls the base implementation
(to raise a ``TypeError``).
For example, to support arbitrary iterators, you could
implement default like this::
def default(self, o):
try:
iterable = iter(o)
except TypeError:
pass
else:
return list(iterable)
return JSONEncoder.default(self, o)
"""
raise TypeError(repr(o) + " is not JSON serializable")
def encode(self, o):
"""Return a JSON string representation of a Python data structure.
>>> from simplejson import JSONEncoder
>>> JSONEncoder().encode({"foo": ["bar", "baz"]})
'{"foo": ["bar", "baz"]}'
"""
# This is for extremely simple cases and benchmarks.
if isinstance(o, basestring):
if isinstance(o, str):
_encoding = self.encoding
if (_encoding is not None
and not (_encoding == 'utf-8')):
o = o.decode(_encoding)
if self.ensure_ascii:
return encode_basestring_ascii(o)
else:
return encode_basestring(o)
# This doesn't pass the iterator directly to ''.join() because the
# exceptions aren't as detailed. The list call should be roughly
# equivalent to the PySequence_Fast that ''.join() would do.
chunks = self.iterencode(o, _one_shot=True)
if not isinstance(chunks, (list, tuple)):
chunks = list(chunks)
if self.ensure_ascii:
return ''.join(chunks)
else:
return u''.join(chunks)
def iterencode(self, o, _one_shot=False):
"""Encode the given object and yield each string
representation as available.
For example::
for chunk in JSONEncoder().iterencode(bigobject):
mysocket.write(chunk)
"""
if self.check_circular:
markers = {}
else:
markers = None
if self.ensure_ascii:
_encoder = encode_basestring_ascii
else:
_encoder = encode_basestring
if self.encoding != 'utf-8':
def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
if isinstance(o, str):
o = o.decode(_encoding)
return _orig_encoder(o)
def floatstr(o, allow_nan=self.allow_nan,
_repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
# Check for specials. Note that this type of test is processor
# and/or platform-specific, so do tests which don't depend on
# the internals.
if o != o:
text = 'NaN'
elif o == _inf:
text = 'Infinity'
elif o == _neginf:
text = '-Infinity'
else:
return _repr(o)
if not allow_nan:
raise ValueError(
"Out of range float values are not JSON compliant: " +
repr(o))
return text
key_memo = {}
if (_one_shot and c_make_encoder is not None
and not self.indent and not self.sort_keys):
_iterencode = c_make_encoder(
markers, self.default, _encoder, self.indent,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, self.allow_nan, key_memo, self.use_decimal)
else:
_iterencode = _make_iterencode(
markers, self.default, _encoder, self.indent, floatstr,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, _one_shot, self.use_decimal)
try:
return _iterencode(o, 0)
finally:
key_memo.clear()
class JSONEncoderForHTML(JSONEncoder):
"""An encoder that produces JSON safe to embed in HTML.
To embed JSON content in, say, a script tag on a web page, the
characters &, < and > should be escaped. They cannot be escaped
    with the usual entities (e.g. ``&amp;``) because they are not expanded
within <script> tags.
"""
def encode(self, o):
# Override JSONEncoder.encode because it has hacks for
# performance that make things more complicated.
chunks = self.iterencode(o, True)
if self.ensure_ascii:
return ''.join(chunks)
else:
return u''.join(chunks)
def iterencode(self, o, _one_shot=False):
chunks = super(JSONEncoderForHTML, self).iterencode(o, _one_shot)
for chunk in chunks:
chunk = chunk.replace('&', '\\u0026')
chunk = chunk.replace('<', '\\u003c')
chunk = chunk.replace('>', '\\u003e')
yield chunk
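# Short illustrative comparison (not part of the original module): the
# HTML-unsafe characters pass through the plain encoder untouched but are
# escaped by JSONEncoderForHTML, so the result can sit inside <script>:
#
#     JSONEncoder().encode({'x': '<b>&'})         # '{"x": "<b>&"}'
#     JSONEncoderForHTML().encode({'x': '<b>&'})  # '{"x": "\\u003cb\\u003e\\u0026"}'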
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
_key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
_use_decimal,
## HACK: hand-optimized bytecode; turn globals into locals
False=False,
True=True,
ValueError=ValueError,
basestring=basestring,
Decimal=Decimal,
dict=dict,
float=float,
id=id,
int=int,
isinstance=isinstance,
list=list,
long=long,
str=str,
tuple=tuple,
):
def _iterencode_list(lst, _current_indent_level):
if not lst:
yield '[]'
return
if markers is not None:
markerid = id(lst)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = lst
buf = '['
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + (_indent * _current_indent_level)
separator = _item_separator + newline_indent
buf += newline_indent
else:
newline_indent = None
separator = _item_separator
first = True
for value in lst:
if first:
first = False
else:
buf = separator
if isinstance(value, basestring):
yield buf + _encoder(value)
elif value is None:
yield buf + 'null'
elif value is True:
yield buf + 'true'
elif value is False:
yield buf + 'false'
elif isinstance(value, (int, long)):
yield buf + str(value)
elif isinstance(value, float):
yield buf + _floatstr(value)
elif _use_decimal and isinstance(value, Decimal):
yield buf + str(value)
else:
yield buf
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + (_indent * _current_indent_level)
yield ']'
if markers is not None:
del markers[markerid]
def _iterencode_dict(dct, _current_indent_level):
if not dct:
yield '{}'
return
if markers is not None:
markerid = id(dct)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = dct
yield '{'
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + (_indent * _current_indent_level)
item_separator = _item_separator + newline_indent
yield newline_indent
else:
newline_indent = None
item_separator = _item_separator
first = True
if _sort_keys:
items = dct.items()
items.sort(key=lambda kv: kv[0])
else:
items = dct.iteritems()
for key, value in items:
if isinstance(key, basestring):
pass
# JavaScript is weakly typed for these, so it makes sense to
# also allow them. Many encoders seem to do something like this.
elif isinstance(key, float):
key = _floatstr(key)
elif key is True:
key = 'true'
elif key is False:
key = 'false'
elif key is None:
key = 'null'
elif isinstance(key, (int, long)):
key = str(key)
elif _skipkeys:
continue
else:
raise TypeError("key " + repr(key) + " is not a string")
if first:
first = False
else:
yield item_separator
yield _encoder(key)
yield _key_separator
if isinstance(value, basestring):
yield _encoder(value)
elif value is None:
yield 'null'
elif value is True:
yield 'true'
elif value is False:
yield 'false'
elif isinstance(value, (int, long)):
yield str(value)
elif isinstance(value, float):
yield _floatstr(value)
elif _use_decimal and isinstance(value, Decimal):
yield str(value)
else:
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + (_indent * _current_indent_level)
yield '}'
if markers is not None:
del markers[markerid]
def _iterencode(o, _current_indent_level):
if isinstance(o, basestring):
yield _encoder(o)
elif o is None:
yield 'null'
elif o is True:
yield 'true'
elif o is False:
yield 'false'
elif isinstance(o, (int, long)):
yield str(o)
elif isinstance(o, float):
yield _floatstr(o)
elif isinstance(o, (list, tuple)):
for chunk in _iterencode_list(o, _current_indent_level):
yield chunk
elif isinstance(o, dict):
for chunk in _iterencode_dict(o, _current_indent_level):
yield chunk
elif _use_decimal and isinstance(o, Decimal):
yield str(o)
else:
if markers is not None:
markerid = id(o)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
for chunk in _iterencode(o, _current_indent_level):
yield chunk
if markers is not None:
del markers[markerid]
return _iterencode
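# --- usage sketch (illustrative; not part of the original module) ---
# Demonstrates the pretty-printing options documented in
# JSONEncoder.__init__ above; the sample data is made up.
if __name__ == '__main__':
    enc = JSONEncoder(sort_keys=True, indent='  ', separators=(',', ': '))
    print enc.encode({'b': [1, 2.5], 'a': None})
    # {
    #   "a": null,
    #   "b": [
    #     1,
    #     2.5
    #   ]
    # }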
|
apache-2.0
|
summermk/dragonfly
|
dragonfly/engines/base/compiler.py
|
5
|
3942
|
#
# This file is part of Dragonfly.
# (c) Copyright 2007, 2008 by Christo Butcher
# Licensed under the LGPL.
#
# Dragonfly is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Dragonfly is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with Dragonfly. If not, see
# <http://www.gnu.org/licenses/>.
#
"""
This file implements the compiler base class.
"""
import logging
import dragonfly.grammar.elements as elements_
#---------------------------------------------------------------------------
class CompilerError(Exception):
pass
#---------------------------------------------------------------------------
class CompilerBase(object):
_log = logging.getLogger("engine.compiler")
element_compilers = [
(elements_.Sequence, lambda s,e,*a,**k: s._compile_sequence(e,*a,**k)),
(elements_.Alternative, lambda s,e,*a,**k: s._compile_alternative(e,*a,**k)),
(elements_.Optional, lambda s,e,*a,**k: s._compile_optional(e,*a,**k)),
(elements_.Literal, lambda s,e,*a,**k: s._compile_literal(e,*a,**k)),
(elements_.RuleRef, lambda s,e,*a,**k: s._compile_rule_ref(e,*a,**k)),
(elements_.ListRef, lambda s,e,*a,**k: s._compile_list_ref(e,*a,**k)),
(elements_.Dictation, lambda s,e,*a,**k: s._compile_dictation(e,*a,**k)),
(elements_.Impossible, lambda s,e,*a,**k: s._compile_impossible(e,*a,**k)),
(elements_.Empty, lambda s,e,*a,**k: s._compile_empty(e,*a,**k)),
]
#-----------------------------------------------------------------------
def __str__(self):
return "%s()" % self.__class__.__name__
#-----------------------------------------------------------------------
# Methods for compiling grammars.
def compile_grammar(self, grammar, *args, **kwargs):
raise NotImplementedError("Compiler %s not implemented." % self)
#-----------------------------------------------------------------------
# Methods for compiling elements.
def compile_element(self, element, *args, **kwargs):
# Look for a compiler method to handle the given element.
for element_type, compiler in self.element_compilers:
if isinstance(element, element_type):
compiler(self, element, *args, **kwargs)
return
# Didn't find a compiler method for this element type.
raise NotImplementedError("Compiler %s not implemented"
" for element type %s."
% (self, element))
#-----------------------------------------------------------------------
def _compile_unknown_element(self, element, *args, **kwargs):
raise NotImplementedError("Compiler %s not implemented"
" for element type %s."
% (self, element))
_compile_sequence = _compile_unknown_element
_compile_alternative = _compile_unknown_element
_compile_optional = _compile_unknown_element
_compile_literal = _compile_unknown_element
_compile_rule_ref = _compile_unknown_element
_compile_list_ref = _compile_unknown_element
_compile_dictation = _compile_unknown_element
_compile_impossible = _compile_unknown_element
_compile_empty = _compile_unknown_element
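#---------------------------------------------------------------------------
# Illustrative subclass sketch (not part of dragonfly): a concrete engine
# compiler overrides only the _compile_* hooks it supports, and
# compile_element() dispatches to them through element_compilers.
# _EchoCompiler is hypothetical, and element.children is assumed to
# expose an element's child elements.
class _EchoCompiler(CompilerBase):
    def _compile_literal(self, element, *args, **kwargs):
        self._log.debug("compiling literal element: %s", element)
    def _compile_sequence(self, element, *args, **kwargs):
        for child in element.children:
            self.compile_element(child, *args, **kwargs)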
|
lgpl-3.0
|
vprime/puuuu
|
env/bin/activate_this.py
|
669
|
1129
|
"""By using execfile(this_file, dict(__file__=this_file)) you will
activate this virtualenv environment.
This can be used when you must use an existing Python interpreter, not
the virtualenv bin/python
"""
try:
__file__
except NameError:
raise AssertionError(
"You must run this like execfile('path/to/activate_this.py', dict(__file__='path/to/activate_this.py'))")
import sys
import os
old_os_path = os.environ['PATH']
os.environ['PATH'] = os.path.dirname(os.path.abspath(__file__)) + os.pathsep + old_os_path
base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if sys.platform == 'win32':
site_packages = os.path.join(base, 'Lib', 'site-packages')
else:
site_packages = os.path.join(base, 'lib', 'python%s' % sys.version[:3], 'site-packages')
prev_sys_path = list(sys.path)
import site
site.addsitedir(site_packages)
sys.real_prefix = sys.prefix
sys.prefix = base
# Move the added items to the front of the path:
new_sys_path = []
for item in list(sys.path):
if item not in prev_sys_path:
new_sys_path.append(item)
sys.path.remove(item)
sys.path[:0] = new_sys_path
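# --- usage sketch (illustrative) ---
# From any Python 2 interpreter, assuming the virtualenv lives at the
# hypothetical path /path/to/env:
#
#     activate_this = '/path/to/env/bin/activate_this.py'
#     execfile(activate_this, dict(__file__=activate_this))
#
# Afterwards the environment's site-packages sit at the front of sys.path
# and sys.prefix points at the environment, as arranged above.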
|
mit
|
barachka/odoo
|
addons/document/wizard/document_configuration.py
|
381
|
4895
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class document_configuration(osv.osv_memory):
_name='document.configuration'
_description = 'Directory Configuration'
_inherit = 'res.config'
def execute(self, cr, uid, ids, context=None):
dir_pool = self.pool.get('document.directory')
data_pool = self.pool.get('ir.model.data')
model_pool = self.pool.get('ir.model')
content_pool = self.pool.get('document.directory.content')
if self.pool.get('sale.order'):
# Sale order
dir_data_id = data_pool._get_id(cr, uid, 'document', 'dir_sale_order_all')
if dir_data_id:
sale_dir_id = data_pool.browse(cr, uid, dir_data_id, context=context).res_id
else:
sale_dir_id = data_pool.create(cr, uid, {'name': 'Sale Orders'})
mid = model_pool.search(cr, uid, [('model','=','sale.order')])
dir_pool.write(cr, uid, [sale_dir_id], {
'type':'ressource',
'ressource_type_id': mid[0],
'domain': '[]',
})
            # Quotation
dir_data_id = data_pool._get_id(cr, uid, 'document', 'dir_sale_order_quote')
if dir_data_id:
quta_dir_id = data_pool.browse(cr, uid, dir_data_id, context=context).res_id
else:
quta_dir_id = data_pool.create(cr, uid, {'name': 'Sale Quotations'})
dir_pool.write(cr, uid, [quta_dir_id], {
'type':'ressource',
'ressource_type_id': mid[0],
'domain': "[('state','=','draft')]",
})
# Sale Order Report
order_report_data_id = data_pool._get_id(cr, uid, 'sale', 'report_sale_order')
if order_report_data_id:
order_report_id = data_pool.browse(cr, uid, order_report_data_id, context=context).res_id
content_pool.create(cr, uid, {
'name': "Print Order",
'suffix': "_print",
'report_id': order_report_id,
'extension': '.pdf',
'include_name': 1,
'directory_id': sale_dir_id,
})
content_pool.create(cr, uid, {
'name': "Print Quotation",
'suffix': "_print",
'report_id': order_report_id,
'extension': '.pdf',
'include_name': 1,
'directory_id': quta_dir_id,
})
if self.pool.get('product.product'):
# Product
dir_data_id = data_pool._get_id(cr, uid, 'document', 'dir_product')
if dir_data_id:
product_dir_id = data_pool.browse(cr, uid, dir_data_id, context=context).res_id
else:
product_dir_id = data_pool.create(cr, uid, {'name': 'Products'})
mid = model_pool.search(cr, uid, [('model','=','product.product')])
dir_pool.write(cr, uid, [product_dir_id], {
'type':'ressource',
'ressource_type_id': mid[0],
})
if self.pool.get('account.analytic.account'):
# Project
dir_data_id = data_pool._get_id(cr, uid, 'document', 'dir_project')
if dir_data_id:
project_dir_id = data_pool.browse(cr, uid, dir_data_id, context=context).res_id
else:
project_dir_id = data_pool.create(cr, uid, {'name': 'Projects'})
mid = model_pool.search(cr, uid, [('model','=','account.analytic.account')])
dir_pool.write(cr, uid, [project_dir_id], {
'type':'ressource',
'ressource_type_id': mid[0],
'domain': '[]',
'ressource_tree': 1
})
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
abaditsegay/arangodb
|
3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/win32comext/axscript/test/testHost4Dbg.py
|
18
|
2565
|
import string, os, sys, traceback
from win32com.axscript import axscript
from win32com.axscript.server import axsite
from win32com.axscript.server.error import Exception
import pythoncom
from win32com.server import util
import win32ui
version = "0.0.1"
class MySite(axsite.AXSite):
def OnScriptError(self, error):
print "An error occurred in the Script Code"
exc = error.GetExceptionInfo()
try:
text = error.GetSourceLineText()
except:
text = "<unknown>"
context, line, char = error.GetSourcePosition()
print "Exception: %s (line %d)\n%s\n%s^\n%s" % (exc[1], line, text, " " * (char-1), exc[2])
class ObjectModel:
_public_methods_ = [ 'echo', 'msgbox' ]
def echo(self, *args):
print string.join(map(str, args))
def msgbox(self, *args):
msg = string.join(map(str, args))
win32ui.MessageBox(msg)
def TestEngine():
model = {'Test' : util.wrap(ObjectModel()) }
scriptDir = "."
site = MySite(model)
pyEngine = site._AddEngine("Python")
# pyEngine2 = site._AddEngine("Python")
vbEngine = site._AddEngine("VBScript")
# forthEngine = site._AddEngine("ForthScript")
try:
# code = open(os.path.join(scriptDir, "debugTest.4ths"),"rb").read()
# forthEngine.AddCode(code)
code = open(os.path.join(scriptDir, "debugTest.pys"),"rb").read()
pyEngine.AddCode(code)
code = open(os.path.join(scriptDir, "debugTest.vbs"),"rb").read()
vbEngine.AddCode(code)
# code = open(os.path.join(scriptDir, "debugTestFail.pys"),"rb").read()
# pyEngine2.AddCode(code)
# from win32com.axdebug import axdebug
# sessionProvider=pythoncom.CoCreateInstance(axdebug.CLSID_DefaultDebugSessionProvider,None,pythoncom.CLSCTX_ALL, axdebug.IID_IDebugSessionProvider)
# sessionProvider.StartDebugSession(None)
raw_input("Press enter to continue")
# forthEngine.Start()
pyEngine.Start() # Actually run the Python code
vbEngine.Start() # Actually run the VB code
except pythoncom.com_error, details:
print "Script failed: %s (0x%x)" % (details[1], details[0])
# Now run the code expected to fail!
# try:
# pyEngine2.Start() # Actually run the Python code that fails!
# print "Script code worked when it should have failed."
# except pythoncom.com_error:
# pass
site._Close()
if __name__ == '__main__':
import win32com.axdebug.util
try:
TestEngine()
except:
traceback.print_exc()
win32com.axdebug.util._dump_wrapped()
sys.exc_type = sys.exc_value = sys.exc_traceback = None
print pythoncom._GetInterfaceCount(),"com objects still alive"
|
apache-2.0
|
pansay/jasmine.github.io
|
2.1/src/python_egg.py
|
34
|
1456
|
## Using Jasmine with Python
# The Jasmine Python package contains helper code for developing Jasmine projects for Python-based web projects (Django, Flask, etc.)
# or for JavaScript projects where Python is a welcome partner. It serves up a project's Jasmine suite in a browser so you can focus on
# your code instead of manually editing script tags in the Jasmine runner HTML file.
### Install
# You can install Jasmine using pip or by adding it to your `requirements.txt`.
pip install jasmine
### \_\_init\_\_ a Project
# Initialize a project for Jasmine; this creates a spec directory and configuration yaml for you.
jasmine-install
### Configuration
# Customize `spec/javascripts/support/jasmine.yml` to enumerate the source files, stylesheets, and spec files you would like the
# Jasmine runner to include. You may use dir glob strings.
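# A minimal `jasmine.yml` might look like this (the file names are
# illustrative; check your jasmine version for the full list of keys):
#
# src_files:
# - src/**/*.js
# spec_files:
# - "**/*[Ss]pec.js"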
### Usage
# Once you have set up your `jasmine.yml`, you can start the Jasmine server by running `jasmine`.
jasmine
# Point your browser to `localhost:8888`. The suite will run every time the page is re-loaded.
# Start Jasmine on a different port by passing the `-p` flag.
jasmine -p 1337
#### Continuous Integration
# For Continuous Integration environments, run `jasmine-ci` in the project build steps:
jasmine-ci
# The browser used by selenium can be changed by exporting `JASMINE_BROWSER`
export JASMINE_BROWSER=chrome
jasmine-ci
# or by setting the `--browser` flag.
jasmine-ci --browser firefox
|
mit
|
myles/django-issues
|
src/issues/models.py
|
1
|
3687
|
from django.db import models
from django.conf import settings
from django.db.models import permalink
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
from django.contrib.comments.models import Comment
from django.contrib.contenttypes.generic import GenericRelation
class Version(models.Model):
name = models.CharField(_('name'), max_length=200)
slug = models.SlugField(_('slug'), max_length=50, unique=True)
description = models.TextField(_('description'), blank=True, null=True)
url = models.URLField(_('url'), blank=True, null=True)
date = models.DateField(_('date'), blank=True, null=True)
date_added = models.DateTimeField(_('date added'), auto_now_add=True)
date_modified = models.DateTimeField(_('date modified'), auto_now=True)
class Meta:
db_table = 'issue_versions'
ordering = ('date',)
verbose_name = _('version')
verbose_name_plural = _('versions')
def __unicode__(self):
return u"%s" % (self.name)
@permalink
def get_absolute_url(self):
return ('issues_version_detail', None, {
'slug': self.slug,
})
class Category(models.Model):
name = models.CharField(_('name'), max_length=200)
slug = models.SlugField(_('slug'), max_length=50, unique=True)
assigned = models.ForeignKey(User, blank=True, null=True,
related_name='category_assigned')
date_added = models.DateTimeField(_('date added'), auto_now_add=True)
date_modified = models.DateTimeField(_('date modified'), auto_now=True)
class Meta:
db_table = 'issue_categories'
ordering = ('name',)
verbose_name = _('category')
verbose_name_plural = _('categories')
def __unicode__(self):
return u"%s" % (self.name)
@permalink
def get_absolute_url(self):
return ('issue_category_detail', None, {
'slug': self.slug,
})
ISSUE_STATUSES = (
(1, _('New')),
(2, _('Assigned')),
(3, _('Resolved')),
(4, _('Feedback')),
(5, _('Closed')),
(6, _('Rejected')),
)
ISSUE_PRIORITIES = (
(1, _('Low')),
(2, _('Normal')),
(3, _('High')),
(4, _('Urgent')),
(5, _('Immediate')),
)
# TODO: I want this to first check the `settings.py` file for customization.
ISSUE_STATUS_CHOICES = ISSUE_STATUSES
ISSUE_PRIORITIY_CHOICES = ISSUE_PRIORITIES
class Issue(models.Model):
subject = models.CharField(_('subject'), max_length=200)
description = models.TextField(_('description'), blank=True, null=True)
version = models.ForeignKey(Version, blank=True, null=True)
category = models.ForeignKey(Category, blank=True, null=True)
created = models.ForeignKey(User, related_name='created')
assigned = models.ForeignKey(User, blank=True, null=True,
related_name='issue_assigned')
watcher = models.ManyToManyField(User, blank=True, null=True,
related_name='watchers')
start_date = models.DateField(_('start'), blank=True, null=True,
help_text=_('The date to start working on the issue.'))
due_date = models.DateField(_('due date'), blank=True, null=True,
help_text=_('The date the issue is due.'))
status = models.IntegerField(_('status'), choices=ISSUE_STATUS_CHOICES,
default=1)
priority = models.IntegerField(_('priority'),
choices=ISSUE_PRIORITIY_CHOICES, default=2)
comment = GenericRelation(Comment, object_id_field='object_pk')
date_added = models.DateTimeField(_('date added'), auto_now_add=True)
date_modified = models.DateTimeField(_('date modified'), auto_now=True)
class Meta:
db_table = 'issues'
ordering = ('due_date', 'priority',)
verbose_name = _('issue')
verbose_name_plural = _('issues')
def __unicode__(self):
return u"[%s] %s" % (self.pk, self.subject)
@permalink
def get_absolute_url(self):
return ('issues_issue_detail', None, {
'pk': self.pk,
})
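# Usage sketch (illustrative; the user lookup and field values below are
# made up):
#
# from django.contrib.auth.models import User
# from issues.models import Version, Issue
#
# reporter = User.objects.get(pk=1)
# v = Version.objects.create(name='1.0', slug='1-0')
# Issue.objects.create(subject='Broken link on homepage', version=v,
#                      created=reporter, priority=3)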
|
bsd-3-clause
|
alexanderturner/ansible
|
lib/ansible/modules/utilities/logic/fail.py
|
38
|
1597
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2012 Dag Wieers <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'core',
'version': '1.0'}
DOCUMENTATION = '''
---
module: fail
short_description: Fail with custom message
description:
  - This module fails the play with a custom message. It can be
    useful for bailing out when a certain condition is met using C(when).
version_added: "0.8"
options:
msg:
description:
- The customized message used for failing execution. If omitted,
        fail will simply bail out with a generic message.
required: false
default: "'Failed as requested from task'"
author: "Dag Wieers (@dagwieers)"
'''
EXAMPLES = '''
# Example playbook using fail and when together
- fail:
msg: "The system may not be provisioned according to the CMDB status."
when: cmdb_status != "to-be-staged"
'''
|
gpl-3.0
|
MarcJoan/django
|
tests/signing/tests.py
|
74
|
5473
|
from __future__ import unicode_literals
import datetime
from django.core import signing
from django.test import SimpleTestCase
from django.test.utils import freeze_time
from django.utils import six
from django.utils.encoding import force_str
class TestSigner(SimpleTestCase):
def test_signature(self):
"signature() method should generate a signature"
signer = signing.Signer('predictable-secret')
signer2 = signing.Signer('predictable-secret2')
for s in (
b'hello',
b'3098247:529:087:',
'\u2019'.encode('utf-8'),
):
self.assertEqual(
signer.signature(s),
signing.base64_hmac(signer.salt + 'signer', s,
'predictable-secret').decode()
)
self.assertNotEqual(signer.signature(s), signer2.signature(s))
def test_signature_with_salt(self):
"signature(value, salt=...) should work"
signer = signing.Signer('predictable-secret', salt='extra-salt')
self.assertEqual(
signer.signature('hello'),
signing.base64_hmac('extra-salt' + 'signer',
'hello', 'predictable-secret').decode()
)
self.assertNotEqual(
signing.Signer('predictable-secret', salt='one').signature('hello'),
signing.Signer('predictable-secret', salt='two').signature('hello'))
def test_sign_unsign(self):
"sign/unsign should be reversible"
signer = signing.Signer('predictable-secret')
examples = [
'q;wjmbk;wkmb',
'3098247529087',
'3098247:529:087:',
'jkw osanteuh ,rcuh nthu aou oauh ,ud du',
'\u2019',
]
if six.PY2:
examples.append(b'a byte string')
for example in examples:
signed = signer.sign(example)
self.assertIsInstance(signed, str)
self.assertNotEqual(force_str(example), signed)
self.assertEqual(example, signer.unsign(signed))
    def test_unsign_detects_tampering(self):
"unsign should raise an exception if the value has been tampered with"
signer = signing.Signer('predictable-secret')
value = 'Another string'
signed_value = signer.sign(value)
transforms = (
lambda s: s.upper(),
lambda s: s + 'a',
lambda s: 'a' + s[1:],
lambda s: s.replace(':', ''),
)
self.assertEqual(value, signer.unsign(signed_value))
for transform in transforms:
self.assertRaises(
signing.BadSignature, signer.unsign, transform(signed_value))
def test_dumps_loads(self):
"dumps and loads be reversible for any JSON serializable object"
objects = [
['a', 'list'],
'a unicode string \u2019',
{'a': 'dictionary'},
]
if six.PY2:
objects.append(b'a byte string')
for o in objects:
self.assertNotEqual(o, signing.dumps(o))
self.assertEqual(o, signing.loads(signing.dumps(o)))
self.assertNotEqual(o, signing.dumps(o, compress=True))
self.assertEqual(o, signing.loads(signing.dumps(o, compress=True)))
def test_decode_detects_tampering(self):
"loads should raise exception for tampered objects"
transforms = (
lambda s: s.upper(),
lambda s: s + 'a',
lambda s: 'a' + s[1:],
lambda s: s.replace(':', ''),
)
value = {
'foo': 'bar',
'baz': 1,
}
encoded = signing.dumps(value)
self.assertEqual(value, signing.loads(encoded))
for transform in transforms:
self.assertRaises(
signing.BadSignature, signing.loads, transform(encoded))
def test_works_with_non_ascii_keys(self):
binary_key = b'\xe7' # Set some binary (non-ASCII key)
s = signing.Signer(binary_key)
self.assertEqual('foo:6NB0fssLW5RQvZ3Y-MTerq2rX7w', s.sign('foo'))
def test_valid_sep(self):
separators = ['/', '*sep*', ',']
for sep in separators:
signer = signing.Signer('predictable-secret', sep=sep)
self.assertEqual('foo%ssH9B01cZcJ9FoT_jEVkRkNULrl8' % sep, signer.sign('foo'))
def test_invalid_sep(self):
"""should warn on invalid separator"""
msg = 'Unsafe Signer separator: %r (cannot be empty or consist of only A-z0-9-_=)'
separators = ['', '-', 'abc']
for sep in separators:
with self.assertRaisesMessage(ValueError, msg % sep):
signing.Signer(sep=sep)
class TestTimestampSigner(SimpleTestCase):
def test_timestamp_signer(self):
value = 'hello'
with freeze_time(123456789):
signer = signing.TimestampSigner('predictable-key')
ts = signer.sign(value)
self.assertNotEqual(ts,
signing.Signer('predictable-key').sign(value))
self.assertEqual(signer.unsign(ts), value)
with freeze_time(123456800):
self.assertEqual(signer.unsign(ts, max_age=12), value)
# max_age parameter can also accept a datetime.timedelta object
self.assertEqual(signer.unsign(ts, max_age=datetime.timedelta(seconds=11)), value)
self.assertRaises(signing.SignatureExpired, signer.unsign, ts, max_age=10)
|
bsd-3-clause
|
drcapulet/sentry
|
src/sentry/migrations/0103_ensure_non_empty_slugs.py
|
30
|
26927
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
from sentry.constants import RESERVED_TEAM_SLUGS
from sentry.models import slugify_instance
for team in orm['sentry.Team'].objects.filter(models.Q(slug='') | models.Q(slug__isnull=True)):
slugify_instance(team, team.name, reserved=RESERVED_TEAM_SLUGS)
team.save()
for project in orm['sentry.Project'].objects.filter(models.Q(slug='') | models.Q(slug__isnull=True)):
slugify_instance(project, project.name, reserved=RESERVED_TEAM_SLUGS)
project.save()
def backwards(self, orm):
"Write your backwards methods here."
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sentry.accessgroup': {
'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sentry.User']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sentry.Project']", 'symmetrical': 'False'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
u'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Event']", 'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'null': 'True'})
},
u'sentry.alert': {
'Meta': {'object_name': 'Alert'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'related_groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_alerts'", 'symmetrical': 'False', 'through': u"orm['sentry.AlertRelatedGroup']", 'to': u"orm['sentry.Group']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
u'sentry.alertrelatedgroup': {
'Meta': {'unique_together': "(('group', 'alert'),)", 'object_name': 'AlertRelatedGroup'},
'alert': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Alert']"}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
u'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'time_spent': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
u'sentry.eventmapping': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"})
},
u'sentry.group': {
'Meta': {'unique_together': "(('project', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
u'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': u"orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': u"orm['sentry.User']"})
},
u'sentry.groupcountbyminute': {
'Meta': {'unique_together': "(('project', 'group', 'date'),)", 'object_name': 'GroupCountByMinute', 'db_table': "'sentry_messagecountbyminute'"},
'date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
u'sentry.grouptag': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTag', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'unique': 'True'})
},
u'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
u'sentry.pendingteammember': {
'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'pending_member_set'", 'to': u"orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
u'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'),)", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_owned_project_set'", 'null': 'True', 'to': u"orm['sentry.User']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Team']", 'null': 'True'})
},
u'sentry.projectcountbyminute': {
'Meta': {'unique_together': "(('project', 'date'),)", 'object_name': 'ProjectCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': u"orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'null': 'True'}),
'user_added': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'keys_added_set'", 'null': 'True', 'to': u"orm['sentry.User']"})
},
u'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
u'sentry.searchdocument': {
'Meta': {'unique_together': "(('project', 'group'),)", 'object_name': 'SearchDocument'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_changed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'total_events': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
u'sentry.searchtoken': {
'Meta': {'unique_together': "(('document', 'field', 'token'),)", 'object_name': 'SearchToken'},
'document': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'token_set'", 'to': u"orm['sentry.SearchDocument']"}),
'field': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '64'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'sentry.team': {
'Meta': {'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'team_memberships'", 'symmetrical': 'False', 'through': u"orm['sentry.TeamMember']", 'to': u"orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
u'sentry.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': u"orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_teammember_set'", 'to': u"orm['sentry.User']"})
},
u'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
}
}
complete_apps = ['sentry']
symmetrical = True
|
bsd-3-clause
|
luistorresm/odoo
|
addons/website_sale/models/sale_order.py
|
81
|
10744
|
# -*- coding: utf-8 -*-
import random
from openerp import SUPERUSER_ID
from openerp.osv import osv, orm, fields
from openerp.addons.web.http import request
from openerp.tools.translate import _
class sale_order(osv.Model):
_inherit = "sale.order"
def _cart_qty(self, cr, uid, ids, field_name, arg, context=None):
res = dict()
for order in self.browse(cr, uid, ids, context=context):
res[order.id] = int(sum(l.product_uom_qty for l in (order.website_order_line or [])))
return res
_columns = {
'website_order_line': fields.one2many(
'sale.order.line', 'order_id',
string='Order Lines displayed on Website', readonly=True,
            help='Order Lines to be displayed on the website. They should not be used for computation purposes.',
),
'cart_quantity': fields.function(_cart_qty, type='integer', string='Cart Quantity'),
'payment_acquirer_id': fields.many2one('payment.acquirer', 'Payment Acquirer', on_delete='set null', copy=False),
'payment_tx_id': fields.many2one('payment.transaction', 'Transaction', on_delete='set null', copy=False),
}
def _get_errors(self, cr, uid, order, context=None):
return []
def _get_website_data(self, cr, uid, order, context):
return {
'partner': order.partner_id.id,
'order': order
}
def _cart_find_product_line(self, cr, uid, ids, product_id=None, line_id=None, context=None, **kwargs):
for so in self.browse(cr, uid, ids, context=context):
domain = [('order_id', '=', so.id), ('product_id', '=', product_id)]
if line_id:
domain += [('id', '=', line_id)]
return self.pool.get('sale.order.line').search(cr, SUPERUSER_ID, domain, context=context)
def _website_product_id_change(self, cr, uid, ids, order_id, product_id, qty=0, line_id=None, context=None):
so = self.pool.get('sale.order').browse(cr, uid, order_id, context=context)
values = self.pool.get('sale.order.line').product_id_change(cr, SUPERUSER_ID, [],
pricelist=so.pricelist_id.id,
product=product_id,
partner_id=so.partner_id.id,
fiscal_position=so.fiscal_position.id,
qty=qty,
context=dict(context or {}, company_id=so.company_id.id)
)['value']
if line_id:
line = self.pool.get('sale.order.line').browse(cr, SUPERUSER_ID, line_id, context=context)
values['name'] = line.name
else:
product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
values['name'] = product.description_sale and "%s\n%s" % (product.display_name, product.description_sale) or product.display_name
values['product_id'] = product_id
values['order_id'] = order_id
        if values.get('tax_id') is not None:
values['tax_id'] = [(6, 0, values['tax_id'])]
return values
def _cart_update(self, cr, uid, ids, product_id=None, line_id=None, add_qty=0, set_qty=0, context=None, **kwargs):
""" Add or set product quantity, add_qty can be negative """
sol = self.pool.get('sale.order.line')
quantity = 0
for so in self.browse(cr, uid, ids, context=context):
if so.state != 'draft':
request.session['sale_order_id'] = None
raise osv.except_osv(_('Error!'), _('It is forbidden to modify a sale order which is not in draft status'))
if line_id != False:
line_ids = so._cart_find_product_line(product_id, line_id, context=context, **kwargs)
if line_ids:
line_id = line_ids[0]
# Create line if no line with product_id can be located
if not line_id:
values = self._website_product_id_change(cr, uid, ids, so.id, product_id, qty=1, context=context)
line_id = sol.create(cr, SUPERUSER_ID, values, context=context)
if add_qty:
add_qty -= 1
# compute new quantity
if set_qty:
quantity = set_qty
            elif add_qty is not None:
                quantity = sol.browse(cr, SUPERUSER_ID, line_id, context=context).product_uom_qty + (add_qty or 0)
            # Remove zero or negative lines
if quantity <= 0:
sol.unlink(cr, SUPERUSER_ID, [line_id], context=context)
else:
# update line
values = self._website_product_id_change(cr, uid, ids, so.id, product_id, qty=quantity, line_id=line_id, context=context)
values['product_uom_qty'] = quantity
sol.write(cr, SUPERUSER_ID, [line_id], values, context=context)
return {'line_id': line_id, 'quantity': quantity}
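    # A minimal usage sketch (illustrative, not part of this module): a website
    # controller would typically fetch the draft order and call _cart_update on
    # it; `product` here is a hypothetical product.product record.
    #
    #   order = request.website.sale_get_order(force_create=True)
    #   result = order._cart_update(product_id=product.id, add_qty=1)
    #   cart_qty = result['quantity']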
def _cart_accessories(self, cr, uid, ids, context=None):
for order in self.browse(cr, uid, ids, context=context):
s = set(j.id for l in (order.website_order_line or []) for j in (l.product_id.accessory_product_ids or []))
s -= set(l.product_id.id for l in order.order_line)
product_ids = random.sample(s, min(len(s),3))
return self.pool['product.product'].browse(cr, uid, product_ids, context=context)
class website(orm.Model):
_inherit = 'website'
_columns = {
'pricelist_id': fields.related('user_id','partner_id','property_product_pricelist',
type='many2one', relation='product.pricelist', string='Default Pricelist'),
'currency_id': fields.related('pricelist_id','currency_id',
type='many2one', relation='res.currency', string='Default Currency'),
}
def sale_product_domain(self, cr, uid, ids, context=None):
return [("sale_ok", "=", True)]
def sale_get_order(self, cr, uid, ids, force_create=False, code=None, update_pricelist=None, context=None):
sale_order_obj = self.pool['sale.order']
sale_order_id = request.session.get('sale_order_id')
sale_order = None
# create so if needed
if not sale_order_id and (force_create or code):
# TODO cache partner_id session
partner = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid, context=context).partner_id
for w in self.browse(cr, uid, ids):
values = {
'user_id': w.user_id.id,
'partner_id': partner.id,
'pricelist_id': partner.property_product_pricelist.id,
'section_id': self.pool.get('ir.model.data').get_object_reference(cr, uid, 'website', 'salesteam_website_sales')[1],
}
sale_order_id = sale_order_obj.create(cr, SUPERUSER_ID, values, context=context)
values = sale_order_obj.onchange_partner_id(cr, SUPERUSER_ID, [], partner.id, context=context)['value']
sale_order_obj.write(cr, SUPERUSER_ID, [sale_order_id], values, context=context)
request.session['sale_order_id'] = sale_order_id
if sale_order_id:
# TODO cache partner_id session
partner = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid, context=context).partner_id
sale_order = sale_order_obj.browse(cr, SUPERUSER_ID, sale_order_id, context=context)
if not sale_order.exists():
request.session['sale_order_id'] = None
return None
# check for change of pricelist with a coupon
if code and code != sale_order.pricelist_id.code:
pricelist_ids = self.pool['product.pricelist'].search(cr, SUPERUSER_ID, [('code', '=', code)], context=context)
if pricelist_ids:
pricelist_id = pricelist_ids[0]
request.session['sale_order_code_pricelist_id'] = pricelist_id
update_pricelist = True
pricelist_id = request.session.get('sale_order_code_pricelist_id') or partner.property_product_pricelist.id
# check for change of partner_id ie after signup
if sale_order.partner_id.id != partner.id and request.website.partner_id.id != partner.id:
flag_pricelist = False
if pricelist_id != sale_order.pricelist_id.id:
flag_pricelist = True
fiscal_position = sale_order.fiscal_position and sale_order.fiscal_position.id or False
values = sale_order_obj.onchange_partner_id(cr, SUPERUSER_ID, [sale_order_id], partner.id, context=context)['value']
if values.get('fiscal_position'):
order_lines = map(int,sale_order.order_line)
values.update(sale_order_obj.onchange_fiscal_position(cr, SUPERUSER_ID, [],
values['fiscal_position'], [[6, 0, order_lines]], context=context)['value'])
values['partner_id'] = partner.id
sale_order_obj.write(cr, SUPERUSER_ID, [sale_order_id], values, context=context)
if flag_pricelist or values.get('fiscal_position', False) != fiscal_position:
update_pricelist = True
# update the pricelist
if update_pricelist:
values = {'pricelist_id': pricelist_id}
values.update(sale_order.onchange_pricelist_id(pricelist_id, None)['value'])
sale_order.write(values)
for line in sale_order.order_line:
if line.exists():
sale_order._cart_update(product_id=line.product_id.id, line_id=line.id, add_qty=0)
# update browse record
if (code and code != sale_order.pricelist_id.code) or sale_order.partner_id.id != partner.id:
sale_order = sale_order_obj.browse(cr, SUPERUSER_ID, sale_order.id, context=context)
return sale_order
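    # Note on the coupon path above (descriptive only): when `code` matches a
    # pricelist, the order is switched to that pricelist and _cart_update is
    # re-run on every surviving line so that unit prices are recomputed.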
def sale_get_transaction(self, cr, uid, ids, context=None):
transaction_obj = self.pool.get('payment.transaction')
tx_id = request.session.get('sale_transaction_id')
if tx_id:
tx_ids = transaction_obj.search(cr, SUPERUSER_ID, [('id', '=', tx_id), ('state', 'not in', ['cancel'])], context=context)
if tx_ids:
return transaction_obj.browse(cr, SUPERUSER_ID, tx_ids[0], context=context)
else:
request.session['sale_transaction_id'] = False
return False
def sale_reset(self, cr, uid, ids, context=None):
request.session.update({
'sale_order_id': False,
'sale_transaction_id': False,
'sale_order_code_pricelist_id': False,
})
|
agpl-3.0
|
l11x0m7/lightnn
|
lightnn/test/cnn_gradient_check.py
|
1
|
5027
|
# -*- encoding:utf-8 -*-
import sys
sys.path.append('../../')
import numpy as np
from lightnn.layers.convolutional import Conv2d
from lightnn.layers.pooling import MaxPoolingLayer, AvgPoolingLayer
from lightnn.base.activations import Sigmoid, Relu, Identity
from lightnn.base.initializers import xavier_uniform_initializer
def conv_gradient_check():
"""
gradient check for convolution layer
"""
activator = Relu()
def init_test():
a = np.array(
[[[0,1,1,0,2],
[2,2,2,2,1],
[1,0,0,2,0],
[0,1,1,0,0],
[1,2,0,0,2]],
[[1,0,2,2,0],
[0,0,0,2,0],
[1,2,1,2,1],
[1,0,0,0,0],
[1,2,1,1,1]],
[[2,1,2,0,0],
[1,0,0,1,0],
[0,2,1,0,1],
[0,1,2,2,2],
[2,1,0,0,1]]])
a = a.transpose([1,2,0])
a = np.expand_dims(a, 0)
    # debug point: when `stride` is `[1, 1]`, the forward pass produces incorrect results
cl = Conv2d((3,3), 2, (1,5,5,3), 1, [1, 1], activator=activator,
initializer=xavier_uniform_initializer)
cl.filters[0].weights = np.array(
[[[-1,1,0],
[0,1,0],
[0,1,1]],
[[-1,-1,0],
[0,0,0],
[0,-1,0]],
[[0,0,-1],
[0,1,0],
[1,-1,-1]]], dtype=np.float64).transpose([1,2,0])
cl.filters[0].b=1
cl.filters[1].W = np.array(
[[[1,1,-1],
[-1,-1,1],
[0,-1,1]],
[[0,1,0],
[-1,0,-1],
[-1,1,0]],
[[-1,0,0],
[-1,0,1],
[-1,0,0]]], dtype=np.float64).transpose([1,2,0])
return a, cl
"""
gradient check
"""
    # Design an error function: the sum of all output elements
error_function = lambda o : np.sum(o) / 2
    # Compute the forward pass
a, cl = init_test()
output = cl.forward(a)
print np.transpose(output, [0, 3, 1, 2])
    # Build the sensitivity map, an all-ones array
sensitivity_array = np.ones(cl.output.shape,
dtype=np.float64) / 2
    # Compute the gradients
cl.backward(sensitivity_array)
    # Check the gradients numerically
epsilon = 1e-4
for d in range(cl.filters[0].delta_W.shape[0]):
for i in range(cl.filters[0].delta_W.shape[1]):
for j in range(cl.filters[0].delta_W.shape[2]):
cl.filters[0].W[d,i,j] += epsilon
cl.forward(a)
err1 = error_function(cl.output)
cl.filters[0].W[d,i,j] -= 2*epsilon
cl.forward(a)
err2 = error_function(cl.output)
expect_grad = (err1 - err2) / (2 * epsilon)
cl.filters[0].W[d,i,j] += epsilon
                print 'weights(%d,%d,%d): expected - actual %f - %f' % (
                    d, i, j, expect_grad, cl.filters[0].delta_W[d,i,j])
cl.filters[0].b += epsilon
cl.forward(a)
err1 = error_function(cl.output)
cl.filters[0].b -= 2*epsilon
cl.forward(a)
err2 = error_function(cl.output)
expect_grad = (err1 - err2) / (2 * epsilon)
cl.filters[0].b += epsilon
    print 'bias: expected - actual %f - %f' % (
        expect_grad, cl.filters[0].delta_b)
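# The loop above is a second-order central-difference check:
#     dE/dw ~= (E(w + eps) - E(w - eps)) / (2 * eps),   with error O(eps^2).
# A generic helper built on the same idea (illustrative sketch, not part of
# lightnn; `loss` is a zero-argument closure that re-runs the forward pass
# and returns the scalar error):
#
#   def numeric_grad(loss, w, eps=1e-4):
#       grad = np.zeros_like(w)
#       it = np.nditer(w, flags=['multi_index'])
#       while not it.finished:
#           idx = it.multi_index
#           old = w[idx]
#           w[idx] = old + eps
#           e_plus = loss()
#           w[idx] = old - eps
#           e_minus = loss()
#           w[idx] = old
#           grad[idx] = (e_plus - e_minus) / (2 * eps)
#           it.iternext()
#       return grad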
def max_pool_gradient_check():
"""
gradient check for max pooling layer
"""
a = np.array(
[[[0,1,1,0,2],
[2,2,2,2,1],
[1,0,0,2,0],
[0,1,1,0,0],
[1,2,0,0,2]],
[[1,0,2,2,0],
[0,0,0,2,0],
[1,2,1,2,1],
[1,0,0,0,0],
[1,2,1,1,1]],
[[2,1,2,0,0],
[1,0,0,1,0],
[0,2,1,0,1],
[0,1,2,2,2],
[2,1,0,0,1]]]).transpose([1,2,0])
a = np.expand_dims(a, 0)
mp = MaxPoolingLayer((2,2), (1,5,5,3), [1,1], 0)
output = mp.forward(a)
print output.transpose((0,3,1,2))
sensitivity_array = np.ones(mp.output.shape,
dtype=np.float64)
delta = mp.backward(sensitivity_array)
print delta.transpose([0,3,1,2])
def avg_pool_gradient_check():
"""
gradient check for avg pooling layer
"""
a = np.array(
[[[0,1,1,0,2],
[2,2,2,2,1],
[1,0,0,2,0],
[0,1,1,0,0],
[1,2,0,0,2]],
[[1,0,2,2,0],
[0,0,0,2,0],
[1,2,1,2,1],
[1,0,0,0,0],
[1,2,1,1,1]],
[[2,1,2,0,0],
[1,0,0,1,0],
[0,2,1,0,1],
[0,1,2,2,2],
[2,1,0,0,1]]]).transpose([1,2,0])
a = np.expand_dims(a, 0)
mp = AvgPoolingLayer((2,2), (1,5,5,3), [1,1], 0)
output = mp.forward(a)
print output.transpose([0,3,1,2])
sensitivity_array = np.ones(mp.output.shape,
dtype=np.float64)
delta = mp.backward(sensitivity_array)
print delta.transpose([0,3,1,2])
if __name__ == '__main__':
conv_gradient_check()
max_pool_gradient_check()
avg_pool_gradient_check()
|
apache-2.0
|
dakcarto/suite-qgis-plugin
|
src/opengeo/gui/qgsexploreritems.py
|
1
|
16869
|
import os
import sys
from PyQt4 import QtGui,QtCore
from PyQt4.QtCore import *
from opengeo.gui.exploreritems import TreeItem
from opengeo.qgis import layers as qgislayers
from dialogs.styledialog import PublishStyleDialog
from opengeo.qgis.catalog import OGCatalog
from opengeo.gui.catalogselector import selectCatalog
from dialogs.layerdialog import PublishLayersDialog, PublishLayerDialog
from dialogs.projectdialog import PublishProjectDialog
from opengeo.gui.dialogs.importvector import ImportIntoPostGISDialog
from opengeo import config
from geoserver.catalog import ConflictingDataError
from opengeo.gui.confirm import publishLayer
from opengeo.gui.dialogs.metatoolseditor import MetatoolsEditor
from opengeo.metadata.metadata_provider import MetadataProvider
from qgis.core import *
class QgsTreeItem(TreeItem):
def iconPath(self):
return os.path.dirname(__file__) + "/../images/qgis.png"
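# The item classes below wrap live QGIS objects (the project, its layers,
# layer groups and layer styles) as explorer tree items; each class exposes
# context-menu actions that publish the wrapped object to a GeoServer catalog
# or import it into a PostGIS database.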
class QgsProjectItem(QgsTreeItem):
def __init__(self):
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/qgis.png")
TreeItem.__init__(self, None, icon, "QGIS project")
def populate(self):
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/layer.png")
layersItem = QgsTreeItem(None, icon, "QGIS Layers")
layersItem.setIcon(0, icon)
layers = qgislayers.getAllLayers()
for layer in layers:
layerItem = QgsLayerItem(layer)
layersItem.addChild(layerItem)
self.addChild(layersItem)
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/group.gif")
groupsItem = QgsTreeItem(None, icon, "QGIS Groups")
groups = qgislayers.getGroups()
for group in groups:
groupItem = QgsGroupItem(group)
groupsItem.addChild(groupItem)
groupItem.populate()
self.addChild(groupsItem)
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/style.png")
stylesItem = QgsTreeItem(None, icon, "QGIS Styles")
stylesItem.setIcon(0, icon)
styles = qgislayers.getAllLayers()
for style in styles:
styleItem = QgsStyleItem(style)
stylesItem.addChild(styleItem)
self.addChild(stylesItem)
def contextMenuActions(self, tree, explorer):
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/publish-to-geoserver.png")
publishProjectAction = QtGui.QAction(icon, "Publish...", explorer)
publishProjectAction.triggered.connect(lambda: self.publishProject(tree, explorer))
publishProjectAction.setEnabled(len(explorer.catalogs())>0)
return [publishProjectAction]
def publishProject(self, tree, explorer):
layers = qgislayers.getAllLayers()
dlg = PublishProjectDialog(explorer.catalogs())
dlg.exec_()
catalog = dlg.catalog
if catalog is None:
return
workspace = dlg.workspace
groupName = dlg.groupName
explorer.setProgressMaximum(len(layers), "Publish layers")
progress = 0
ogcat = OGCatalog(catalog)
for layer in layers:
explorer.setProgress(progress)
if not explorer.run(publishLayer,
None,
[],
ogcat, layer, workspace, True):
explorer.setProgress(0)
return
progress += 1
explorer.setProgress(progress)
explorer.resetActivity()
groups = qgislayers.getGroups()
for group in groups:
names = [layer.name() for layer in groups[group]]
try:
layergroup = catalog.create_layergroup(group, names, names)
explorer.run(catalog.save, "Create layer group '" + group + "'",
[], layergroup)
except ConflictingDataError, e:
explorer.setWarning(str(e))
if groupName is not None:
names = [layer.name() for layer in layers]
layergroup = catalog.create_layergroup(groupName, names, names)
explorer.run(catalog.save, "Create global layer group",
[], layergroup)
tree.findAllItems(catalog)[0].refreshContent(explorer)
class QgsLayerItem(QgsTreeItem):
    def __init__(self, layer):
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/layer.png")
TreeItem.__init__(self, layer, icon)
self.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsDragEnabled)
def contextMenuActions(self, tree, explorer):
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/publish-to-geoserver.png")
publishLayerAction = QtGui.QAction(icon, "Publish to GeoServer...", explorer)
publishLayerAction.triggered.connect(lambda: self.publishLayer(tree, explorer))
publishLayerAction.setEnabled(len(explorer.catalogs())>0)
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/create-store-from-layer.png")
        createStoreFromLayerAction = QtGui.QAction(icon, "Create store from layer...", explorer)
createStoreFromLayerAction.triggered.connect(lambda: self.createStoreFromLayer(tree, explorer))
createStoreFromLayerAction.setEnabled(len(explorer.catalogs())>0)
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/metadata.png")
editMetadataAction = QtGui.QAction(icon, "Edit/view layer metadata...", explorer)
editMetadataAction.triggered.connect(lambda: self.editMetadata(tree, explorer))
editMetadataAction.setEnabled(True)
actions = [publishLayerAction, createStoreFromLayerAction, editMetadataAction]
if isinstance(self.element, QgsVectorLayer):
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/postgis_import.png")
importToPostGisAction = QtGui.QAction(icon, "Import into PostGIS...", explorer)
importToPostGisAction.triggered.connect(lambda: self.importLayerToPostGis(tree, explorer))
importToPostGisAction.setEnabled(len(explorer.pgDatabases())>0)
actions.append(importToPostGisAction)
return actions
def editMetadata(self, tree, explorer):
try:
md = MetadataProvider.getProvider(self.element)
except Exception, e:
explorer.setWarning(unicode(e.args[0]))
return
self.dlg = MetatoolsEditor(config.iface.mainWindow())
try:
self.dlg.setContent(md, self.element)
self.dlg.show()
except Exception, e:
explorer.setWarning("Cannot open layer metadata: " + unicode(e.args[0]))
def importMetadataFromFile(self, layers, explorer):
fileName = QtGui.QFileDialog.getOpenFileName(config.iface.mainWindow(),
"Select metadata file",
"",
'XML files (*.xml);;Text files (*.txt *.TXT);;All files (*.*)'
)
if fileName:
try:
for layer in layers:
md = MetadataProvider.getProvider(layer)
md.importFromFile(unicode(fileName))
explorer.setInfo("Metadata was imported successfully")
except Exception, e:
explorer.setWarning("Metadata can't be imported: " + e.args[0])
def multipleSelectionContextMenuActions(self, tree, explorer, selected):
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/publish-to-geoserver.png")
publishLayersAction = QtGui.QAction(icon, "Publish to GeoServer...", explorer)
publishLayersAction.triggered.connect(lambda: self.publishLayers(tree, explorer, selected))
publishLayersAction.setEnabled(len(explorer.catalogs())>0)
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/create-store-from-layer.png")
        createStoresFromLayersAction = QtGui.QAction(icon, "Create stores from layers...", explorer)
createStoresFromLayersAction.triggered.connect(lambda: self.createStoresFromLayers(tree, explorer, selected))
createStoresFromLayersAction.setEnabled(len(explorer.catalogs())>0)
importToPostGisAction = QtGui.QAction("Import into PostGIS...", explorer)
importToPostGisAction.triggered.connect(lambda: self.importLayersToPostGis(tree, explorer, selected))
importToPostGisAction.setEnabled(len(explorer.pgDatabases())>0)
importMetadataAction = QtGui.QAction("Import metadata from file...", explorer)
importMetadataAction.triggered.connect(lambda: self.importMetadataFromFile([item.element for item in selected], explorer))
importMetadataAction.setEnabled(True)
return [publishLayersAction, createStoresFromLayersAction, importToPostGisAction, importMetadataAction]
def publishLayers(self, tree, explorer, selected):
layers = [item.element for item in selected]
dlg = PublishLayersDialog(explorer.catalogs(), layers)
dlg.exec_()
toPublish = dlg.topublish
if toPublish is None:
return
explorer.setProgressMaximum(len(toPublish), "Publish layers")
progress = 0
        toUpdate = set()
for layer, catalog, workspace in toPublish:
explorer.setProgress(progress)
ogcat = OGCatalog(catalog)
if explorer.run(publishLayer,
None,
[],
ogcat, layer, workspace, True):
toUpdate.add(tree.findAllItems(catalog)[0])
progress += 1
explorer.setProgress(progress)
for item in toUpdate:
item.refreshContent(explorer)
explorer.resetActivity()
def importLayerToPostGis(self, tree, explorer):
self.importLayersToPostGis(tree, explorer, [self])
def importLayersToPostGis(self, tree, explorer, selected):
layers = [item.element for item in selected]
dlg = ImportIntoPostGISDialog(explorer.pgDatabases(), toImport = layers)
dlg.exec_()
if dlg.ok:
schema = [s for s in dlg.connection.schemas() if s.name == dlg.schema][0]
if len(dlg.toImport) > 1:
explorer.setProgressMaximum(len(dlg.toImport), "Import layers to PostGIS")
for i, layer in enumerate(dlg.toImport):
explorer.run(dlg.connection.importFileOrLayer,
"Import layer into PostGIS",
tree.findAllItems(schema),
layer, dlg.schema, dlg.tablename, not dlg.add, dlg.single)
explorer.setProgress(i + 1)
explorer.resetActivity()
def createStoresFromLayers(self, tree, explorer, selected):
layers = [item.element for item in selected]
dlg = PublishLayersDialog(explorer.catalogs(), layers)
dlg.exec_()
toPublish = dlg.topublish
if toPublish is None:
return
explorer.setProgressMaximum(len(toPublish), "Upload layers")
progress = 0
        toUpdate = set()
for layer, catalog, workspace in toPublish:
explorer.setProgress(progress)
ogcat = OGCatalog(catalog)
explorer.run(ogcat.upload,
None,
[],
layer, workspace, True)
progress += 1
            toUpdate.add(tree.findAllItems(catalog)[0])
explorer.setProgress(progress)
for item in toUpdate:
item.refreshContent(explorer)
explorer.resetActivity()
def createStoreFromLayer(self, tree, explorer):
dlg = PublishLayerDialog(explorer.catalogs())
dlg.exec_()
if dlg.catalog is None:
return
cat = dlg.catalog
ogcat = OGCatalog(cat)
catItem = tree.findAllItems(cat)[0]
toUpdate = [catItem]
explorer.run(ogcat.upload,
"Create store from layer '" + self.element.name() + "'",
toUpdate,
self.element, dlg.workspace, True)
def publishLayer(self, tree, explorer):
dlg = PublishLayerDialog(explorer.catalogs())
dlg.exec_()
if dlg.catalog is None:
return
cat = dlg.catalog
ogcat = OGCatalog(cat)
catItem = tree.findAllItems(cat)[0]
explorer.run(publishLayer,
"Publish layer '" + self.element.name() + "'",
[catItem],
ogcat, self.element, dlg.workspace, True)
class QgsGroupItem(QgsTreeItem):
def __init__(self, group):
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/group.gif")
        TreeItem.__init__(self, group, icon)
self.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsDragEnabled)
def populate(self):
grouplayers = qgislayers.getGroups()[self.element]
for layer in grouplayers:
layerItem = QgsLayerItem(layer)
self.addChild(layerItem)
def contextMenuActions(self, tree, explorer):
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/publish-to-geoserver.png")
publishGroupAction = QtGui.QAction(icon, "Publish...", explorer)
publishGroupAction.triggered.connect(lambda: self.publishGroup(tree, explorer))
publishGroupAction.setEnabled(len(explorer.catalogs())>0)
        return [publishGroupAction]
def publishGroup(self, tree, explorer):
groupname = self.element
groups = qgislayers.getGroups()
group = groups[groupname]
cat = selectCatalog(explorer.catalogs())
if cat is None:
return
gslayers= [layer.name for layer in cat.get_layers()]
missing = []
overwrite = bool(QSettings().value("/OpenGeo/Settings/GeoServer/OverwriteGroupLayers", True, bool))
for layer in group:
if layer.name() not in gslayers or overwrite:
missing.append(layer)
        toUpdate = set()
toUpdate.add(tree.findAllItems(cat)[0])
if missing:
catalogs = dict([(k ,v) for k, v in explorer.catalogs().iteritems() if v == cat])
dlg = PublishLayersDialog(catalogs, missing)
dlg.exec_()
toPublish = dlg.topublish
if toPublish is None:
return
explorer.setProgressMaximum(len(toPublish), "Publish layers")
progress = 0
for layer, catalog, workspace in toPublish:
explorer.setProgress(progress)
ogcat = OGCatalog(catalog)
if not explorer.run(ogcat.publishLayer,
None,
[],
layer, workspace, True):
explorer.setProgress(0)
return
progress += 1
explorer.setProgress(progress)
explorer.resetActivity()
names = [layer.name() for layer in group]
def _createGroup():
layergroup = cat.create_layergroup(groupname, names, names)
cat.save(layergroup)
explorer.run(_createGroup, "Create layer group from group '" + groupname + "'",
toUpdate)
class QgsStyleItem(QgsTreeItem):
def __init__(self, layer):
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/style.png")
TreeItem.__init__(self, layer, icon, "Style of layer '" + layer.name() + "'")
self.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsDragEnabled)
def contextMenuActions(self, tree, explorer):
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/publish-to-geoserver.png")
publishStyleAction = QtGui.QAction(icon, "Publish...", explorer)
publishStyleAction.triggered.connect(lambda: self.publishStyle(tree, explorer))
publishStyleAction.setEnabled(len(explorer.catalogs()) > 0)
icon = QtGui.QIcon(os.path.dirname(__file__) + "/../images/edit.png")
editAction = QtGui.QAction(icon, "Edit...", explorer)
editAction.triggered.connect(lambda: config.iface.showLayerProperties(self.element))
return [publishStyleAction, editAction]
def publishStyle(self, tree, explorer):
dlg = PublishStyleDialog(explorer.catalogs().keys())
dlg.exec_()
if dlg.catalog is None:
return
cat = explorer.catalogs()[dlg.catalog]
ogcat = OGCatalog(cat)
catItem = tree.findAllItems(cat)[0]
toUpdate = [catItem.stylesItem]
explorer.run(ogcat.publishStyle,
"Publish style from layer '" + self.element.name() + "'",
toUpdate,
self.element, True, dlg.name)
|
gpl-2.0
|
fw1121/ete
|
sdoc/face_grid.py
|
2
|
2228
|
from ete2 import Tree, TextFace, NodeStyle, TreeStyle
t = Tree("((a,b),c);")
right_c0_r0 = TextFace("right_col0_row0")
right_c0_r1 = TextFace("right_col0_row1")
right_c1_r0 = TextFace("right_col1_row0")
right_c1_r1 = TextFace("right_col1_row1")
right_c1_r2 = TextFace("right_col1_row2")
top_c0_r0 = TextFace("top_col0_row0")
top_c0_r1 = TextFace("top_col0_row1")
bottom_c0_r0 = TextFace("bottom_col0_row0")
bottom_c0_r1 = TextFace("bottom_col0_row1")
aligned_c0_r0 = TextFace("aligned_col0_row0")
aligned_c0_r1 = TextFace("aligned_col0_row1")
aligned_c1_r0 = TextFace("aligned_col1_row0")
aligned_c1_r1 = TextFace("aligned_col1_row1")
all_faces = [right_c0_r0, right_c0_r1, right_c1_r0, right_c1_r1, right_c1_r2, top_c0_r0, \
top_c0_r1, bottom_c0_r0, bottom_c0_r1, aligned_c0_r0, aligned_c0_r1,\
aligned_c1_r0, aligned_c1_r1]
# set a border in all faces
for f in all_faces:
f.margin_border.width = 1
f.margin_bottom = 5
f.margin_top = 5
f.margin_right = 10
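# Faces are attached at one of the ete2 positions ("branch-right", "branch-top",
# "branch-bottom" or "aligned"). Within a position, faces form a grid: the
# `column` argument selects the column, and repeated calls on the same column
# stack additional rows from top to bottom.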
t.add_face(right_c0_r0, column=0, position="branch-right")
t.add_face(right_c0_r1, column=0, position="branch-right")
t.add_face(right_c1_r0, column=1, position="branch-right")
t.add_face(right_c1_r1, column=1, position="branch-right")
t.add_face(right_c1_r2, column=1, position="branch-right")
t.add_face(top_c0_r0, column=0, position="branch-top")
t.add_face(top_c0_r1, column=0, position="branch-top")
t.add_face(bottom_c0_r0, column=0, position="branch-bottom")
t.add_face(bottom_c0_r1, column=0, position="branch-bottom")
a = t&"a"
a.set_style(NodeStyle())
a.img_style["bgcolor"] = "lightgreen"
b = t&"b"
b.set_style(NodeStyle())
b.img_style["bgcolor"] = "indianred"
c = t&"c"
c.set_style(NodeStyle())
c.img_style["bgcolor"] = "lightblue"
t.set_style(NodeStyle())
t.img_style["bgcolor"] = "lavender"
t.img_style["size"] = 12
for leaf in t.iter_leaves():
leaf.img_style["size"] = 12
leaf.add_face(right_c0_r0, 0, "branch-right")
leaf.add_face(aligned_c0_r1, 0, "aligned")
leaf.add_face(aligned_c0_r0, 0, "aligned")
leaf.add_face(aligned_c1_r1, 1, "aligned")
leaf.add_face(aligned_c1_r0, 1, "aligned")
ts = TreeStyle()
ts.show_scale = False
t.render("face_positions.png", w=800, tree_style=ts)
|
gpl-3.0
|
vane/pywinauto
|
pywinauto/tests/_menux.py
|
17
|
2502
|
# GUI Application automation and testing library
# Copyright (C) 2006 Mark Mc Mahon
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
"tests a set of controls for repeated hotkey errors"
__revision__ = "$Revision$"
#
#
#from Test_AsianHotkey import AsianHotkeyFormatIncorrect
#from win32structures import RECT, LOGFONTW
#
#class DummyCtrl(dict):
# def __getattr__(self, name):
# if name not in self:
# if name + "s" in self:
# return self[name + "s"][0]
# return self[name]
#
#
#
#
#
#
#
##-----------------------------------------------------------------------------
#def MenuRepeatedHotkeyTest(windows):
# "Return the repeated hotkey errors"
# bugs = []
#
# for win in windows:
# if win.MenuItems:
# # we need to get all the separate menu blocks!
# menuBlocks = GetMenuItemsAsCtrlBocks(win.MenuItems)
#
# for menuBlock in menuBlocks:
#
# for test in TestFuncRegistry().RegisteredClasses():
#
# TestFunc = TestFuncRegistry().GetClass(test)
#
# if hasattr(TestFunc, "TestsMenus") and TestFunc.TestsMenus:
#
# testBugs = TestFunc(menuBlock)
# bugs.extend(testBugs)
#
#
##
## if AsianHotkeyFormatIncorrect(item['Text']):
## bugs.append(
## (
## [win,],
## {
## "MenuItem": item['Text'],
## },
## "MenuAsianHotkeyFormat",
## 0)
## )
##
##
#
#
#
## bugs.append((
## controls,
## {
## "RepeatedHotkey" : char,
## "CharsUsedInDialog" : SetAsString(hotkeys),
## "AllCharsInDialog" : SetAsString(allChars),
## "AvailableInControls" : ctrlsAvailableChars,
## },
## "RepeatedHotkey",
## isInRef)
#
# return bugs
#
#
#
#
#import tests
#tests.register("MenuRepeatedHotkey", MenuRepeatedHotkeyTest)
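#
# A minimal, hypothetical sketch (not pywinauto API) of the underlying idea:
# collect the character following each '&' in the menu item texts and flag
# characters used by more than one item.
#
#   def repeated_hotkeys(item_texts):
#       seen = {}
#       for text in item_texts:
#           idx = text.find('&')
#           if idx != -1 and idx + 1 < len(text):
#               seen.setdefault(text[idx + 1].lower(), []).append(text)
#       return dict((k, v) for k, v in seen.items() if len(v) > 1)
#
#   repeated_hotkeys(["&File", "&Format", "&Edit"])  # {'f': ['&File', '&Format']}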
|
lgpl-2.1
|
mozilla/captain
|
vendor/lib/python/django/contrib/admindocs/views.py
|
93
|
15143
|
import inspect
import os
import re
from django import template
from django.template import RequestContext
from django.conf import settings
from django.contrib.admin.views.decorators import staff_member_required
from django.db import models
from django.shortcuts import render_to_response
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.http import Http404
from django.core import urlresolvers
from django.contrib.admindocs import utils
from django.contrib.sites.models import Site
from django.utils.importlib import import_module
from django.utils._os import upath
from django.utils import six
from django.utils.translation import ugettext as _
from django.utils.safestring import mark_safe
# Exclude methods starting with these strings from documentation
MODEL_METHODS_EXCLUDE = ('_', 'add_', 'delete', 'save', 'set_')
class GenericSite(object):
domain = 'example.com'
name = 'my site'
@staff_member_required
def doc_index(request):
if not utils.docutils_is_available:
return missing_docutils_page(request)
return render_to_response('admin_doc/index.html', {
'root_path': urlresolvers.reverse('admin:index'),
}, context_instance=RequestContext(request))
@staff_member_required
def bookmarklets(request):
admin_root = urlresolvers.reverse('admin:index')
return render_to_response('admin_doc/bookmarklets.html', {
'root_path': admin_root,
'admin_url': "%s://%s%s" % (request.is_secure() and 'https' or 'http', request.get_host(), admin_root),
}, context_instance=RequestContext(request))
@staff_member_required
def template_tag_index(request):
if not utils.docutils_is_available:
return missing_docutils_page(request)
load_all_installed_template_libraries()
tags = []
app_libs = list(six.iteritems(template.libraries))
builtin_libs = [(None, lib) for lib in template.builtins]
for module_name, library in builtin_libs + app_libs:
for tag_name, tag_func in library.tags.items():
title, body, metadata = utils.parse_docstring(tag_func.__doc__)
if title:
title = utils.parse_rst(title, 'tag', _('tag:') + tag_name)
if body:
body = utils.parse_rst(body, 'tag', _('tag:') + tag_name)
for key in metadata:
metadata[key] = utils.parse_rst(metadata[key], 'tag', _('tag:') + tag_name)
if library in template.builtins:
tag_library = ''
else:
tag_library = module_name.split('.')[-1]
tags.append({
'name': tag_name,
'title': title,
'body': body,
'meta': metadata,
'library': tag_library,
})
return render_to_response('admin_doc/template_tag_index.html', {
'root_path': urlresolvers.reverse('admin:index'),
'tags': tags
}, context_instance=RequestContext(request))
@staff_member_required
def template_filter_index(request):
if not utils.docutils_is_available:
return missing_docutils_page(request)
load_all_installed_template_libraries()
filters = []
app_libs = list(six.iteritems(template.libraries))
builtin_libs = [(None, lib) for lib in template.builtins]
for module_name, library in builtin_libs + app_libs:
for filter_name, filter_func in library.filters.items():
title, body, metadata = utils.parse_docstring(filter_func.__doc__)
if title:
title = utils.parse_rst(title, 'filter', _('filter:') + filter_name)
if body:
body = utils.parse_rst(body, 'filter', _('filter:') + filter_name)
for key in metadata:
metadata[key] = utils.parse_rst(metadata[key], 'filter', _('filter:') + filter_name)
if library in template.builtins:
tag_library = ''
else:
tag_library = module_name.split('.')[-1]
filters.append({
'name': filter_name,
'title': title,
'body': body,
'meta': metadata,
'library': tag_library,
})
return render_to_response('admin_doc/template_filter_index.html', {
'root_path': urlresolvers.reverse('admin:index'),
'filters': filters
}, context_instance=RequestContext(request))
@staff_member_required
def view_index(request):
if not utils.docutils_is_available:
return missing_docutils_page(request)
if settings.ADMIN_FOR:
settings_modules = [import_module(m) for m in settings.ADMIN_FOR]
else:
settings_modules = [settings]
views = []
for settings_mod in settings_modules:
urlconf = import_module(settings_mod.ROOT_URLCONF)
view_functions = extract_views_from_urlpatterns(urlconf.urlpatterns)
if Site._meta.installed:
site_obj = Site.objects.get(pk=settings_mod.SITE_ID)
else:
site_obj = GenericSite()
for (func, regex) in view_functions:
views.append({
'full_name': '%s.%s' % (func.__module__, getattr(func, '__name__', func.__class__.__name__)),
'site_id': settings_mod.SITE_ID,
'site': site_obj,
'url': simplify_regex(regex),
})
return render_to_response('admin_doc/view_index.html', {
'root_path': urlresolvers.reverse('admin:index'),
'views': views
}, context_instance=RequestContext(request))
@staff_member_required
def view_detail(request, view):
if not utils.docutils_is_available:
return missing_docutils_page(request)
mod, func = urlresolvers.get_mod_func(view)
try:
view_func = getattr(import_module(mod), func)
except (ImportError, AttributeError):
raise Http404
title, body, metadata = utils.parse_docstring(view_func.__doc__)
if title:
title = utils.parse_rst(title, 'view', _('view:') + view)
if body:
body = utils.parse_rst(body, 'view', _('view:') + view)
for key in metadata:
metadata[key] = utils.parse_rst(metadata[key], 'model', _('view:') + view)
return render_to_response('admin_doc/view_detail.html', {
'root_path': urlresolvers.reverse('admin:index'),
'name': view,
'summary': title,
'body': body,
'meta': metadata,
}, context_instance=RequestContext(request))
@staff_member_required
def model_index(request):
if not utils.docutils_is_available:
return missing_docutils_page(request)
m_list = [m._meta for m in models.get_models()]
return render_to_response('admin_doc/model_index.html', {
'root_path': urlresolvers.reverse('admin:index'),
'models': m_list
}, context_instance=RequestContext(request))
@staff_member_required
def model_detail(request, app_label, model_name):
if not utils.docutils_is_available:
return missing_docutils_page(request)
# Get the model class.
try:
app_mod = models.get_app(app_label)
except ImproperlyConfigured:
raise Http404(_("App %r not found") % app_label)
model = None
for m in models.get_models(app_mod):
if m._meta.object_name.lower() == model_name:
model = m
break
if model is None:
raise Http404(_("Model %(model_name)r not found in app %(app_label)r") % {'model_name': model_name, 'app_label': app_label})
opts = model._meta
# Gather fields/field descriptions.
fields = []
for field in opts.fields:
# ForeignKey is a special case since the field will actually be a
# descriptor that returns the other object
if isinstance(field, models.ForeignKey):
data_type = field.rel.to.__name__
app_label = field.rel.to._meta.app_label
verbose = utils.parse_rst((_("the related `%(app_label)s.%(data_type)s` object") % {'app_label': app_label, 'data_type': data_type}), 'model', _('model:') + data_type)
else:
data_type = get_readable_field_data_type(field)
verbose = field.verbose_name
fields.append({
'name': field.name,
'data_type': data_type,
'verbose': verbose,
'help_text': field.help_text,
})
# Gather many-to-many fields.
for field in opts.many_to_many:
data_type = field.rel.to.__name__
app_label = field.rel.to._meta.app_label
verbose = _("related `%(app_label)s.%(object_name)s` objects") % {'app_label': app_label, 'object_name': data_type}
fields.append({
'name': "%s.all" % field.name,
"data_type": 'List',
'verbose': utils.parse_rst(_("all %s") % verbose , 'model', _('model:') + opts.module_name),
})
fields.append({
'name' : "%s.count" % field.name,
'data_type' : 'Integer',
'verbose' : utils.parse_rst(_("number of %s") % verbose , 'model', _('model:') + opts.module_name),
})
# Gather model methods.
for func_name, func in model.__dict__.items():
if (inspect.isfunction(func) and len(inspect.getargspec(func)[0]) == 1):
try:
for exclude in MODEL_METHODS_EXCLUDE:
if func_name.startswith(exclude):
raise StopIteration
except StopIteration:
continue
verbose = func.__doc__
if verbose:
verbose = utils.parse_rst(utils.trim_docstring(verbose), 'model', _('model:') + opts.module_name)
fields.append({
'name': func_name,
'data_type': get_return_data_type(func_name),
'verbose': verbose,
})
# Gather related objects
for rel in opts.get_all_related_objects() + opts.get_all_related_many_to_many_objects():
verbose = _("related `%(app_label)s.%(object_name)s` objects") % {'app_label': rel.opts.app_label, 'object_name': rel.opts.object_name}
accessor = rel.get_accessor_name()
fields.append({
'name' : "%s.all" % accessor,
'data_type' : 'List',
'verbose' : utils.parse_rst(_("all %s") % verbose , 'model', _('model:') + opts.module_name),
})
fields.append({
'name' : "%s.count" % accessor,
'data_type' : 'Integer',
'verbose' : utils.parse_rst(_("number of %s") % verbose , 'model', _('model:') + opts.module_name),
})
return render_to_response('admin_doc/model_detail.html', {
'root_path': urlresolvers.reverse('admin:index'),
'name': '%s.%s' % (opts.app_label, opts.object_name),
'summary': _("Fields on %s objects") % opts.object_name,
'description': model.__doc__,
'fields': fields,
}, context_instance=RequestContext(request))
@staff_member_required
def template_detail(request, template):
templates = []
for site_settings_module in settings.ADMIN_FOR:
settings_mod = import_module(site_settings_module)
if Site._meta.installed:
site_obj = Site.objects.get(pk=settings_mod.SITE_ID)
else:
site_obj = GenericSite()
for dir in settings_mod.TEMPLATE_DIRS:
template_file = os.path.join(dir, template)
templates.append({
'file': template_file,
'exists': os.path.exists(template_file),
                'contents': lambda template_file=template_file: os.path.exists(template_file) and open(template_file).read() or '',
'site_id': settings_mod.SITE_ID,
'site': site_obj,
'order': list(settings_mod.TEMPLATE_DIRS).index(dir),
})
return render_to_response('admin_doc/template_detail.html', {
'root_path': urlresolvers.reverse('admin:index'),
'name': template,
'templates': templates,
}, context_instance=RequestContext(request))
####################
# Helper functions #
####################
def missing_docutils_page(request):
"""Display an error message for people without docutils"""
return render_to_response('admin_doc/missing_docutils.html')
def load_all_installed_template_libraries():
# Load/register all template tag libraries from installed apps.
for module_name in template.get_templatetags_modules():
mod = import_module(module_name)
try:
libraries = [
os.path.splitext(p)[0]
for p in os.listdir(os.path.dirname(upath(mod.__file__)))
if p.endswith('.py') and p[0].isalpha()
]
except OSError:
libraries = []
for library_name in libraries:
try:
lib = template.get_library(library_name)
except template.InvalidTemplateLibrary:
pass
def get_return_data_type(func_name):
"""Return a somewhat-helpful data type given a function name"""
if func_name.startswith('get_'):
if func_name.endswith('_list'):
return 'List'
elif func_name.endswith('_count'):
return 'Integer'
return ''
def get_readable_field_data_type(field):
"""Returns the description for a given field type, if it exists,
Fields' descriptions can contain format strings, which will be interpolated
against the values of field.__dict__ before being output."""
return field.description % field.__dict__
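# For example (illustrative; the exact strings depend on the Django version), a
# CharField declares description = "String (up to %(max_length)s)", so a
# CharField(max_length=100) is documented as "String (up to 100)".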
def extract_views_from_urlpatterns(urlpatterns, base=''):
"""
Return a list of views from a list of urlpatterns.
Each object in the returned list is a two-tuple: (view_func, regex)
"""
views = []
for p in urlpatterns:
if hasattr(p, 'url_patterns'):
try:
patterns = p.url_patterns
except ImportError:
continue
views.extend(extract_views_from_urlpatterns(patterns, base + p.regex.pattern))
elif hasattr(p, 'callback'):
try:
views.append((p.callback, base + p.regex.pattern))
except ViewDoesNotExist:
continue
else:
raise TypeError(_("%s does not appear to be a urlpattern object") % p)
return views
named_group_matcher = re.compile(r'\(\?P(<\w+>).+?\)')
non_named_group_matcher = re.compile(r'\(.*?\)')
def simplify_regex(pattern):
"""
Clean up urlpattern regexes into something somewhat readable by Mere Humans:
turns something like "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
into "<sport_slug>/athletes/<athlete_slug>/"
"""
# handle named groups first
pattern = named_group_matcher.sub(lambda m: m.group(1), pattern)
# handle non-named groups
pattern = non_named_group_matcher.sub("<var>", pattern)
# clean up any outstanding regex-y characters.
pattern = pattern.replace('^', '').replace('$', '').replace('?', '').replace('//', '/').replace('\\', '')
if not pattern.startswith('/'):
pattern = '/' + pattern
return pattern
|
mpl-2.0
|
kbeckmann/Annalog
|
uptime.py
|
1
|
2015
|
import re
import time
import sys
from datetime import datetime,timedelta
from dateutil import tz
class UpTime():
def __init__(self, mucbot):
self.mucbot = mucbot
self.startTime = datetime.now()
def delta_string(self, delta):
h = divmod(delta.seconds, 3600)
m = divmod(h[1], 60)
s = divmod(m[1], 60)
t = []
if delta.days > 0:
t.append("%d day%s" % (delta.days, "" if delta.days == 1 else "s"))
if h[0] > 0:
t.append("%d hour%s" % (h[0], "" if h[0] == 1 else "s"))
if m[0] > 0:
t.append("%d minute%s" % (m[0], "" if m[0] == 1 else "s"))
if s[1] > 0:
t.append("%d second%s" % (s[1], "" if s[0] == 1 else "s"))
if len(t) == 0:
            t.append('ett ' + u'\u00F6' + 'gonblick')  # Swedish: "ett ögonblick" = "a moment"
return ' '.join(t)
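    # Example (illustrative): timedelta(days=1, seconds=3723) renders as
    # "1 day 1 hour 2 minutes 3 seconds".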
def handle(self, msg):
if msg['body'][:7] == "!uptime":
nowTime = datetime.now()
bot = nowTime - self.startTime
with open('/proc/uptime', 'r') as f:
upSeconds = float(f.readline().split()[0])
server = timedelta(seconds = upSeconds)
body = 'Uptime: bot - %s, server - %s' % (self.delta_string(bot), self.delta_string(server))
self.mucbot.send_message(mto=msg['from'].bare,
mbody=body,
mtype='groupchat')
def help(self):
return ['uptime - show uptime']
class SchedulerMock():
def add(self, name, seconds, callback, repeat=False):
return
class MUCBotMock():
def __init__(self):
self.scheduler = SchedulerMock()
def send_message(self, mto, mbody, mtype):
print "MUCBotMock:", mto, mbody, mtype
class FromMock():
def __init__(self, _from):
self.bare = _from
def do_test():
x = UpTime(MUCBotMock())
msg = {"from" : FromMock("[email protected]"), "mucnick" : "kallsse", "body" : "!uptime"}
x.handle(msg)
if __name__ == "__main__":
do_test()
|
mit
|
nixpanic/gluster-wireshark-1.4
|
tools/indexcap.py
|
3
|
11543
|
#!/usr/bin/python
#
# Tool to index protocols that appears in the given capture files
#
# Copyright 2009, Kovarththanan Rajaratnam <[email protected]>
#
# $Id$
#
# Wireshark - Network traffic analyzer
# By Gerald Combs <[email protected]>
# Copyright 1998 Gerald Combs
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
from optparse import OptionParser
import multiprocessing
import sys
import os
import subprocess
import re
import pickle
import tempfile
import filecmp
import random
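# Typical invocations (illustrative; inferred from the option parsing in main()
# below, where the first positional argument is the index file):
#
#   indexcap.py protos.idx captures/             # build/extend the index
#   indexcap.py --list-all-proto protos.idx      # dump protocol counts
#   indexcap.py -d --list-all-proto-files http,dns protos.idx
#                                                # re-dissect matching files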
def extract_protos_from_file_process(tshark, file):
try:
cmd = [tshark, "-Tfields", "-e", "frame.protocols", "-r", file]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode != 0:
return (file, {})
proto_hash = {}
for line in stdout.splitlines():
if not re.match(r'^[\w:-]+$', line):
continue
for proto in line.split(':'):
proto_hash[proto] = 1 + proto_hash.setdefault(proto, 0)
return (file, proto_hash)
except KeyboardInterrupt:
return None
def extract_protos_from_file(tshark, num_procs, max_files, cap_files, cap_hash, index_file_name):
pool = multiprocessing.Pool(num_procs)
    results = [pool.apply_async(extract_protos_from_file_process, [tshark, file]) for file in cap_files]
try:
for (cur_item_idx,result_async) in enumerate(results):
file_result = result_async.get()
action = "SKIPPED" if file_result[1] is {} else "PROCESSED"
print "%s [%u/%u] %s %u bytes" % (action, cur_item_idx+1, max_files, file_result[0], os.path.getsize(file_result[0]))
cap_hash.update(dict([file_result]))
except KeyboardInterrupt:
print "%s was interrupted by user" % (sys.argv[0])
pool.terminate()
exit(1)
index_file = open(index_file_name, "w")
pickle.dump(cap_hash, index_file)
index_file.close()
exit(0)
def dissect_file_process(tshark, tmpdir, file):
try:
(handle_o, tmpfile_o) = tempfile.mkstemp(suffix='_stdout', dir=tmpdir)
(handle_e, tmpfile_e) = tempfile.mkstemp(suffix='_stderr', dir=tmpdir)
cmd = [tshark, "-nxVr", file]
p = subprocess.Popen(cmd, stdout=handle_o, stderr=handle_e)
(stdout, stderr) = p.communicate()
if p.returncode == 0:
return (file, True, tmpfile_o, tmpfile_e)
else:
return (file, False, tmpfile_o, tmpfile_e)
except KeyboardInterrupt:
return False
finally:
os.close(handle_o)
os.close(handle_e)
def dissect_files(tshark, tmpdir, num_procs, max_files, cap_files):
pool = multiprocessing.Pool(num_procs)
results = [pool.apply_async(dissect_file_process, [tshark, tmpdir, file]) for file in cap_files]
try:
for (cur_item_idx,result_async) in enumerate(results):
file_result = result_async.get()
action = "FAILED" if file_result[1] is False else "PASSED"
print "%s [%u/%u] %s %u bytes" % (action, cur_item_idx+1, max_files, file_result[0], os.path.getsize(file_result[0]))
except KeyboardInterrupt:
print "%s was interrupted by user" % (sys.argv[0])
pool.terminate()
exit(1)
def compare_files(tshark_bin, tmpdir, tshark_cmp, num_procs, max_files, cap_files):
pool = multiprocessing.Pool(num_procs)
results_bin = [pool.apply_async(dissect_file_process, [tshark_bin, tmpdir, file]) for file in cap_files]
results_cmp = [pool.apply_async(dissect_file_process, [tshark_cmp, tmpdir, file]) for file in cap_files]
try:
for (cur_item_idx,(result_async_bin, result_async_cmp)) in enumerate(zip(results_bin, results_cmp)):
file_result_bin = result_async_bin.get()
file_result_cmp = result_async_cmp.get()
            if file_result_cmp[1] is False or file_result_bin[1] is False:
                action = "FAILED (exitcode)"
            elif not filecmp.cmp(file_result_bin[2], file_result_cmp[2]):
                action = "FAILED (stdout)"
            elif not filecmp.cmp(file_result_bin[3], file_result_cmp[3]):
                action = "FAILED (stderr)"
            else:
                action = "PASSED"
os.remove(file_result_bin[2])
os.remove(file_result_cmp[2])
os.remove(file_result_bin[3])
os.remove(file_result_cmp[3])
print "%s [%u/%u] %s %u bytes" % (action, cur_item_idx+1, max_files, file_result_bin[0], os.path.getsize(file_result_bin[0]))
print "%s [%u/%u] %s %u bytes" % (action, cur_item_idx+1, max_files, file_result_cmp[0], os.path.getsize(file_result_cmp[0]))
except KeyboardInterrupt:
print "%s was interrupted by user" % (sys.argv[0])
pool.terminate()
exit(1)
def list_all_proto(cap_hash):
proto_hash = {}
for files_hash in cap_hash.itervalues():
for proto,count in files_hash.iteritems():
proto_hash[proto] = count + proto_hash.setdefault(proto, 0)
return proto_hash
def list_all_files(cap_hash):
files = cap_hash.keys()
files.sort()
return files
def list_all_proto_files(cap_hash, proto_comma_delit):
protos = [ x.strip() for x in proto_comma_delit.split(',') ]
files = []
for (file, files_hash) in cap_hash.iteritems():
for proto in files_hash.iterkeys():
if proto in protos:
files.append(file)
break
return files
def index_file_action(options):
return options.list_all_proto or \
options.list_all_files or \
options.list_all_proto_files or \
options.dissect_files
def find_capture_files(paths, cap_hash):
cap_files = []
for path in paths:
if os.path.isdir(path):
path = os.path.normpath(path)
for root, dirs, files in os.walk(path):
cap_files += [os.path.join(root, name) for name in files if os.path.join(root, name) not in cap_hash]
elif path not in cap_hash:
cap_files.append(path)
return cap_files
def find_tshark_executable(bin_dir):
for file in ["tshark.exe", "tshark"]:
tshark = os.path.join(bin_dir, file)
if os.access(tshark, os.X_OK):
return tshark
return None
def main():
parser = OptionParser(usage="usage: %prog [options] index_file [file_1|dir_1 [.. file_n|dir_n]]")
parser.add_option("-d", "--dissect-files", dest="dissect_files", default=False, action="store_true",
help="Dissect all matching files")
parser.add_option("-m", "--max-files", dest="max_files", default=sys.maxint, type="int",
help="Max number of files to process")
parser.add_option("-b", "--binary-dir", dest="bin_dir", default=os.getcwd(),
help="Directory containing tshark executable")
parser.add_option("-c", "--compare-dir", dest="compare_dir", default=None,
help="Directory containing tshark executable which is used for comparison")
parser.add_option("-j", dest="num_procs", default=multiprocessing.cpu_count(), type=int,
help="Max number of processes to spawn")
parser.add_option("-r", "--randomize", default=False, action="store_true",
help="Randomize the file list order")
parser.add_option("", "--list-all-proto", dest="list_all_proto", default=False, action="store_true",
help="List all protocols in index file")
parser.add_option("", "--list-all-files", dest="list_all_files", default=False, action="store_true",
help="List all files in index file")
parser.add_option("", "--list-all-proto-files", dest="list_all_proto_files", default=False,
metavar="PROTO_1[, .. PROTO_N]",
help="List all files in index file containing the given protocol")
(options, args) = parser.parse_args()
if len(args) == 0:
parser.error("index_file is a required argument")
if len(args) == 1 and not index_file_action(options):
parser.error("one capture file/directory must be specified")
if options.dissect_files and not options.list_all_files and not options.list_all_proto_files:
parser.error("--list-all-files or --list-all-proto-files must be specified")
if options.dissect_files and not options.compare_dir is None:
parser.error("--dissect-files and --compare-dir cannot be specified at the same time")
index_file_name = args.pop(0)
paths = args
cap_hash = {}
try:
index_file = open(index_file_name, "r")
print "index file:", index_file.name, "[OPENED]",
cap_hash = pickle.load(index_file)
index_file.close()
print len(cap_hash), "files"
except IOError:
print "index file:", index_file_name, "[NEW]"
if options.list_all_proto:
print list_all_proto(cap_hash)
exit(0)
indexed_files = []
if options.list_all_files:
indexed_files = list_all_files(cap_hash)
print indexed_files
if options.list_all_proto_files:
indexed_files = list_all_proto_files(cap_hash, options.list_all_proto_files)
print indexed_files
tshark_bin = find_tshark_executable(options.bin_dir)
if not tshark_bin is None:
print "tshark:", tshark_bin, "[FOUND]"
else:
print "tshark:", tshark_bin, "[MISSING]"
exit(1)
if not options.compare_dir is None:
tshark_cmp = find_tshark_executable(options.compare_dir)
if not tshark_cmp is None:
print "tshark:", tshark_cmp, "[FOUND]"
else:
print "tshark:", tshark_cmp, "[MISSING]"
exit(1)
if options.dissect_files or options.compare_dir:
cap_files = indexed_files
elif options.list_all_proto_files or options.list_all_files:
exit(0)
else:
cap_files = find_capture_files(paths, cap_hash)
if options.randomize:
random.shuffle(cap_files)
else:
cap_files.sort()
options.max_files = min(options.max_files, len(cap_files))
print "%u total files, %u working files" % (len(cap_files), options.max_files)
cap_files = cap_files[:options.max_files]
tmpdir = tempfile.mkdtemp()
print "Temporary working dir: %s" % tmpdir
if options.compare_dir:
compare_files(tshark_bin, tmpdir, tshark_cmp, options.num_procs, options.max_files, cap_files)
elif options.dissect_files:
dissect_files(tshark_bin, tmpdir, options.num_procs, options.max_files, cap_files)
else:
        extract_protos_from_file(tshark_bin, options.num_procs, options.max_files, cap_files, cap_hash, index_file_name)
os.rmdir(tmpdir)
if __name__ == "__main__":
main()
|
gpl-2.0
|
rakhuba/tucker3d
|
cross/multifun.py
|
1
|
19357
|
import numpy as np
import time
from math import pi
import copy
from scipy.special import erf
import tucker3d as tuck
def multifun(X, delta_cross, fun, r_add=4, y0=None, rmax=100, pr=None):
    # For X = [X_1,...,X_d], where each X_i is a tensor in the Tucker format,
    # multifun computes y = fun(X) == fun(x_1,...,x_d) in the Tucker format by using cross3d
    #
    # delta_cross - accuracy for cross3d
    # r_add - number of columns computed on each iteration of cross3d. Can be tuned to improve the running time.
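    # Usage sketch (illustrative; `a` and `b` are assumed to already be tensors
    # in the Tucker format):
    #
    #   y = multifun([a, b], 1e-8, lambda x: x[0] * x[1])
    #
    # i.e. the elementwise product of a and b, recompressed via cross3d.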
d = len(X)
if type(r_add) == int:
r_add = [r_add, r_add, r_add]
elif len(r_add) == 3:
        pass
else:
raise Exception('r_add must be of type int or list of len = 3')
eps_cross = 1
    if pr is not None:
print('cross multifun... \n')
r = copy.copy(r_add)
n = X[0].n
N = int((min(n)+1)/2)
# Type check
    vals = [X[i].u[0][0, 0] for i in range(len(X))]
    if type(np.sum(vals)) is np.complex128:
dtype = np.complex128
else:
dtype = np.float64
    if pr is not None:
print('data type is', dtype)
# if there is initial guess
    if y0 is not None:
Q1, R = np.linalg.qr(y0.u[0]);
row_order_U1 = np.sort(tuck.mv.maxvol(Q1));
Q2, R = np.linalg.qr(y0.u[1]);
row_order_U2 = np.sort(tuck.mv.maxvol(Q2));
Q3, R = np.linalg.qr(y0.u[2]);
row_order_U3 = np.sort(tuck.mv.maxvol(Q3));
r0 = [len(row_order_U1), len(row_order_U2), len(row_order_U3)]
A = [None]*d
for alpha in range(d):
A[alpha] = np.tensordot(X[alpha].core, np.transpose(X[alpha].u[2][row_order_U3,:]), (2, 0))
A[alpha] = np.tensordot(np.transpose(A[alpha], [2,0,1]), np.transpose(X[alpha].u[1][row_order_U2,:]), (2, 0))
A[alpha] = np.tensordot(np.transpose(A[alpha], [0,2,1]), np.transpose(X[alpha].u[0][row_order_U1,:]), (2, 0))
A[alpha] = np.transpose(A[alpha], [2,1,0])
Ar = fun(A)
A1 = np.reshape(Ar, [r0[0],-1], order='f')
A1 = np.transpose(A1)
Q_A1, R = np.linalg.qr(A1)
column_order_U1 = tuck.mv.maxvol(Q_A1)
A1_11 = A1[column_order_U1, :]
A2 = np.reshape(np.transpose(Ar, [1,0,2]), [r0[1],-1], order='f')
A2 = np.transpose(A2)
Q_A2, R = np.linalg.qr(A2)
column_order_U2 = tuck.mv.maxvol(Q_A2)
A2_11 = A2[column_order_U2, :]
A3 = np.reshape(np.transpose(Ar, [2,0,1]), [r0[2],-1], order='f')
A3 = np.transpose(A3)
Q_A3, R = np.linalg.qr(A3)
column_order_U3 = tuck.mv.maxvol(Q_A3)
A3_11 = A3[column_order_U3, :]
u1 = np.zeros((n[0], r0[0]), dtype=dtype)
for i in range(r0[0]):
for alpha in range(d):
k1_order, j1_order = mod(column_order_U1[i], r0[1])
A[alpha] = np.dot(X[alpha].core,np.transpose(X[alpha].u[2][row_order_U3[k1_order]:row_order_U3[k1_order]+1,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [2,0,1]), np.transpose(X[alpha].u[1][row_order_U2[j1_order]:row_order_U2[j1_order]+1,:]))
A[alpha] = np.tensordot(np.transpose(A[alpha], [0,2,1]), np.transpose(X[alpha].u[0]), (2,0))
A[alpha] = np.transpose(A[alpha], [2,1,0])[:, 0, 0]
u1[:,i] = fun(A)
u2 = np.zeros((n[1], r0[1]), dtype=dtype)
for j in range(r0[1]):
for alpha in range(d):
k1_order, i1_order = mod(column_order_U2[j], r0[0])
A[alpha] = np.dot(X[alpha].core, np.transpose(X[alpha].u[2][row_order_U3[k1_order]:row_order_U3[k1_order]+1,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [2,1,0]),np.transpose(X[alpha].u[0][row_order_U1[i1_order]:row_order_U1[i1_order]+1,:]))
A[alpha] = np.tensordot(np.transpose(A[alpha], [0,2,1]),np.transpose(X[alpha].u[1]), (2, 0))
A[alpha] = np.transpose(A[alpha], [1,2,0])[0, :, 0]
u2[:,j] = fun(A)
u3 = np.zeros((n[2], r0[2]), dtype=dtype)
for k in range(r0[2]):
for alpha in range(d):
j1_order, i1_order = mod(column_order_U3[k], r0[0])
A[alpha] = np.dot(np.transpose(X[alpha].core, [2,1,0]),np.transpose(X[alpha].u[0][row_order_U1[i1_order]:row_order_U1[i1_order]+1,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [0,2,1]),np.transpose(X[alpha].u[1][row_order_U2[j1_order]:row_order_U2[j1_order]+1,:]))
A[alpha] = np.tensordot(np.transpose(A[alpha], [1,2,0]),np.transpose(X[alpha].u[2]), (2, 0))[0,0,:]
u3[:,k] = fun(A)
else:
############################################################
############################################################
############################################################
GG = np.zeros(r, dtype=dtype)
u1 = np.zeros((n[0], r_add[0]), dtype=dtype)
u2 = np.zeros((n[1], r_add[1]), dtype=dtype)
u3 = np.zeros((n[2], r_add[2]), dtype=dtype)
u1[:N,:] = np.random.random((N,r_add[0]))
u2[:N,:] = np.random.random((N,r_add[1]))
u3[:N,:] = np.random.random((N,r_add[2]))
u1, R = np.linalg.qr(u1)
u2, R = np.linalg.qr(u2)
u3, R = np.linalg.qr(u3)
row_order_U1 = tuck.mv.maxvol(u1)
row_order_U2 = tuck.mv.maxvol(u2)
row_order_U3 = tuck.mv.maxvol(u3)
        r0 = [len(row_order_U1), len(row_order_U2), len(row_order_U3)]
A = [None]*d
for alpha in range(d):
A[alpha] = np.tensordot(X[alpha].core, np.transpose(X[alpha].u[2][row_order_U3,:]), (2, 0))
A[alpha] = np.tensordot(np.transpose(A[alpha], [2,0,1]), np.transpose(X[alpha].u[1][row_order_U2,:]), (2, 0))
A[alpha] = np.tensordot(np.transpose(A[alpha], [0,2,1]), np.transpose(X[alpha].u[0][row_order_U1,:]), (2, 0))
A[alpha] = np.transpose(A[alpha], [2,1,0])
Ar = fun(A)
A1 = np.reshape(Ar, [r0[0],-1], order='f')
A1 = np.transpose(A1)
Q_A1, R = np.linalg.qr(A1)
column_order_U1 = tuck.mv.maxvol(Q_A1)
A1_11 = A1[column_order_U1, :]
A2 = np.reshape(np.transpose(Ar, [1,0,2]), [r0[1],-1], order='f')
A2 = np.transpose(A2)
Q_A2, R = np.linalg.qr(A2)
column_order_U2 = tuck.mv.maxvol(Q_A2)
A2_11 = A2[column_order_U2, :]
A3 = np.reshape(np.transpose(Ar, [2,0,1]), [r0[2],-1], order='f')
A3 = np.transpose(A3)
Q_A3, R = np.linalg.qr(A3)
column_order_U3 = tuck.mv.maxvol(Q_A3)
A3_11 = A3[column_order_U3, :]
#################################################################################
U1 = u1
U2 = u2
U3 = u3
U1_hat = np.linalg.solve(U1[row_order_U1, :].T, U1.T).T
U2_hat = np.linalg.solve(U2[row_order_U2, :].T, U2.T).T
U3_hat = np.linalg.solve(U3[row_order_U3, :].T, U3.T).T
u1 = np.random.random((n[0],r_add[0]))
u2 = np.random.random((n[1],r_add[1]))
u3 = np.random.random((n[2],r_add[2]))
UU1, ind_update_1 = column_update(U1_hat, u1, row_order_U1)
UU2, ind_update_2 = column_update(U2_hat, u2, row_order_U2)
UU3, ind_update_3 = column_update(U3_hat, u3, row_order_U3)
U1 = np.concatenate((U1, u1), 1)
U2 = np.concatenate((U2, u2), 1)
U3 = np.concatenate((U3, u3), 1)
A1_12 = np.zeros((r0[0], r_add[0]),dtype=dtype)
for ii in range(r0[0]):
for alpha in range(d):
k1_order, j1_order = mod(column_order_U1[ii], r0[1])
A[alpha] = np.dot(X[alpha].core, np.transpose(X[alpha].u[2][row_order_U3[k1_order]:row_order_U3[k1_order]+1,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [2,0,1]),np.transpose(X[alpha].u[1][row_order_U2[j1_order]:row_order_U2[j1_order]+1,:]))
A[alpha] = np.tensordot(np.transpose(A[alpha], [0,2,1]),np.transpose(X[alpha].u[0][ind_update_1, :]), (2, 0))
A[alpha] = np.transpose(A[alpha], [2,1,0])[:,0,0]
A1_12[ii,:] = fun(A)
A2_12 = np.zeros((r0[1], r_add[1]),dtype=dtype)
for ii in range(r0[1]):
for alpha in range(d):
k1_order, i1_order = mod(column_order_U2[ii], r0[0])
A[alpha] = np.dot(X[alpha].core, np.transpose(X[alpha].u[2][row_order_U3[k1_order]:row_order_U3[k1_order]+1,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [2,1,0]), np.transpose(X[alpha].u[0][row_order_U1[i1_order]:row_order_U1[i1_order]+1,:]))
A[alpha] = np.tensordot(np.transpose(A[alpha], [0,2,1]), np.transpose(X[alpha].u[1][ind_update_2, :]), (2, 0))
A[alpha] = np.transpose(A[alpha], [1,2,0])[0,:,0]
A2_12[ii, :] = fun(A)
A3_12 = np.zeros((r0[2], r_add[2]),dtype=dtype)
for ii in range(r0[2]):
for alpha in range(d):
j1_order, i1_order = mod(column_order_U3[ii], r0[0])
A[alpha] = np.dot(np.transpose(X[alpha].core, [2,1,0]),np.transpose(X[alpha].u[0][row_order_U1[i1_order]:row_order_U1[i1_order]+1,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [0,2,1]),np.transpose(X[alpha].u[1][row_order_U2[j1_order]:row_order_U2[j1_order]+1,:]))
A[alpha] = np.tensordot(np.transpose(A[alpha], [1,2,0]),np.transpose(X[alpha].u[2][ind_update_3, :]), (2, 0))[0,0,:]
A3_12[ii, :] = fun(A)
r[0] = r0[0]+r_add[0]
r[1] = r0[1]+r_add[1]
r[2] = r0[2]+r_add[2]
while True:
for alpha in range(d):
A[alpha] = np.dot(np.transpose(X[alpha].core, [2,1,0]), np.transpose(X[alpha].u[0][ind_update_1,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [0,2,1]), np.transpose(X[alpha].u[1][row_order_U2,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [1,2,0]), np.transpose(X[alpha].u[2][row_order_U3,:]))
Ar_1 = np.concatenate((Ar, fun(A)), 0)
row_order_U1 = np.concatenate((row_order_U1, ind_update_1))
for alpha in range(d):
A[alpha] = np.dot(np.transpose(X[alpha].core, [0,2,1]), np.transpose(X[alpha].u[1][ind_update_2,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [0,2,1]), np.transpose(X[alpha].u[2][row_order_U3,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [2,1,0]), np.transpose(X[alpha].u[0][row_order_U1,:]))
A[alpha] = np.transpose(A[alpha], [2,1,0])
Ar_2 = np.concatenate((Ar_1, fun(A)), 1)
row_order_U2 = np.concatenate((row_order_U2, ind_update_2))
for alpha in range(d):
A[alpha] = np.dot(X[alpha].core, np.transpose(X[alpha].u[2][ind_update_3,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [2,0,1]),np.transpose(X[alpha].u[1][row_order_U2,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [0,2,1]),np.transpose(X[alpha].u[0][row_order_U1,:]))
A[alpha] = np.transpose(A[alpha], [2,1,0])
Ar = np.concatenate((Ar_2, fun(A)), 2)
row_order_U3 = np.concatenate((row_order_U3, ind_update_3))
A1 = np.reshape(Ar, [r[0],-1], order='f')
A1 = np.transpose(A1)
column_order_update_U1 = tuck.mv.maxvol( schur_comp(A1, A1_11, A1_12, dtype) )
r_add[0] = len(column_order_update_U1)
A2 = np.reshape(np.transpose(Ar, [1,0,2]), [r[1],-1], order='f')
A2 = np.transpose(A2)
column_order_update_U2 = tuck.mv.maxvol( schur_comp(A2, A2_11, A2_12, dtype) )
r_add[1] = len(column_order_update_U2)
A3 = np.reshape(np.transpose(Ar, [2,0,1]), [r[2],-1], order='f')
A3 = np.transpose(A3)
column_order_update_U3 = tuck.mv.maxvol( schur_comp(A3, A3_11, A3_12, dtype) )
r_add[2] = len(column_order_update_U3)
u1_approx = np.zeros((n[0], r_add[0]), dtype=dtype)
u1 = np.zeros((n[0], r_add[0]), dtype=dtype)
for i in range(r_add[0]):
for alpha in range(d):
k1_order, j1_order = mod(column_order_update_U1[i], r[1])
A[alpha] = np.dot(X[alpha].core, np.transpose(X[alpha].u[2][row_order_U3[k1_order]:row_order_U3[k1_order]+1,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [2,0,1]),np.transpose(X[alpha].u[1][row_order_U2[j1_order]:row_order_U2[j1_order]+1,:]))
A[alpha] = np.tensordot(np.transpose(A[alpha], [0,2,1]),np.transpose(X[alpha].u[0]), (2, 0))
A[alpha] = np.transpose(A[alpha], [2,1,0])[:,0,0]
u1[:,i] = fun(A)
u1_approx_i = np.dot(Ar, np.transpose(UU3[row_order_U3[k1_order]:row_order_U3[k1_order]+1,:]))
u1_approx_i = np.dot(np.transpose(u1_approx_i,[2,0,1]),np.transpose(UU2[row_order_U2[j1_order]:row_order_U2[j1_order]+1,:]))
u1_approx_i = np.tensordot(np.transpose(u1_approx_i,[0,2,1]),np.transpose(UU1), (2, 0))
u1_approx_i = np.transpose(u1_approx_i,[2,1,0])
u1_approx[:,i] = u1_approx_i[:, 0, 0]
u2_approx = np.zeros((n[1], r_add[1]), dtype=dtype)
u2 = np.zeros((n[1], r_add[1]), dtype=dtype)
for j in range(r_add[1]):
for alpha in range(d):
k1_order, i1_order = mod(column_order_update_U2[j], r[0])
A[alpha] = np.dot(X[alpha].core, np.transpose(X[alpha].u[2][row_order_U3[k1_order]:row_order_U3[k1_order]+1,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [2,1,0]), np.transpose(X[alpha].u[0][row_order_U1[i1_order]:row_order_U1[i1_order]+1,:]))
A[alpha] = np.tensordot(np.transpose(A[alpha], [0,2,1]), np.transpose(X[alpha].u[1]), (2, 0))
A[alpha] = np.transpose(A[alpha], [1,2,0])[0,:,0]
u2[:,j] = fun(A)
u2_approx_j = np.dot(Ar,np.transpose(UU3[row_order_U3[k1_order]:row_order_U3[k1_order]+1,:]))
u2_approx_j = np.dot(np.transpose(u2_approx_j,[2,1,0]),np.transpose(UU1[row_order_U1[i1_order]:row_order_U1[i1_order]+1,:]))
u2_approx_j = np.tensordot(np.transpose(u2_approx_j,[0,2,1]),np.transpose(UU2), (2, 0))
u2_approx[:,j] = u2_approx_j[0, 0, :]
u3_approx = np.zeros((n[2], r_add[2]), dtype=dtype)
u3 = np.zeros((n[2], r_add[2]), dtype=dtype)
for k in range(r_add[2]):
for alpha in range(d):
j1_order, i1_order = mod(column_order_update_U3[k], r[0])
A[alpha] = np.dot(np.transpose(X[alpha].core, [2,1,0]),np.transpose(X[alpha].u[0][row_order_U1[i1_order]:row_order_U1[i1_order]+1,:]))
A[alpha] = np.dot(np.transpose(A[alpha], [0,2,1]),np.transpose(X[alpha].u[1][row_order_U2[j1_order]:row_order_U2[j1_order]+1,:]))
A[alpha] = np.tensordot(np.transpose(A[alpha], [1,2,0]),np.transpose(X[alpha].u[2]), (2, 0))[0,0,:]
u3[:,k] = fun(A)
u3_approx_k = np.dot(np.transpose(Ar,[2,1,0]),np.transpose(UU1[row_order_U1[i1_order]:row_order_U1[i1_order]+1,:]))
u3_approx_k = np.dot(np.transpose(u3_approx_k,[0,2,1]),np.transpose(UU2[row_order_U2[j1_order]:row_order_U2[j1_order]+1,:]))
u3_approx_k = np.tensordot(np.transpose(u3_approx_k,[1,2,0]),np.transpose(UU3), (2, 0))
u3_approx[:,k] = u3_approx_k[0, 0, :]
eps_cross = 1./3*( np.linalg.norm(u1_approx - u1)/ np.linalg.norm(u1) +
np.linalg.norm(u2_approx - u2)/ np.linalg.norm(u2) +
np.linalg.norm(u3_approx - u3)/ np.linalg.norm(u3) )
if pr != None:
print('relative accuracy = %s' % (eps_cross), 'ranks = %s' % r)
if eps_cross < delta_cross:
break
elif r[0] > rmax:
print('Rank has exceeded rmax value')
break
#print np.linalg.norm( full(G, U1, U2, U3) - C_toch )/np.linalg.norm(C_toch)
UU1, ind_update_1 = column_update(UU1, u1, row_order_U1)
UU2, ind_update_2 = column_update(UU2, u2, row_order_U2)
UU3, ind_update_3 = column_update(UU3, u3, row_order_U3)
U1 = np.concatenate((U1, u1), 1)
U2 = np.concatenate((U2, u2), 1)
U3 = np.concatenate((U3, u3), 1)
A1_11 = np.concatenate((A1_11, A1_12), 1)
A1_11 = np.concatenate((A1_11, A1[column_order_update_U1,:]) )
A2_11 = np.concatenate((A2_11, A2_12), 1)
A2_11 = np.concatenate((A2_11, A2[column_order_update_U2,:]) )
A3_11 = np.concatenate((A3_11, A3_12), 1)
A3_11 = np.concatenate((A3_11, A3[column_order_update_U3,:]) )
A1_12 = U1[ind_update_1, r_add[0]:].T
A2_12 = U2[ind_update_2, r_add[1]:].T
A3_12 = U3[ind_update_3, r_add[2]:].T
r[0] = r[0]+r_add[0]
r[1] = r[1]+r_add[1]
r[2] = r[2]+r_add[2]
U1, R1 = np.linalg.qr(UU1)
U2, R2 = np.linalg.qr(UU2)
U3, R3 = np.linalg.qr(UU3)
GG = np.tensordot(np.transpose(Ar,[2,1,0]),np.transpose(R1), (2, 0))
GG = np.tensordot(np.transpose(GG,[0,2,1]),np.transpose(R2), (2, 0))
GG = np.transpose(GG,[1,2,0])
G = np.tensordot(GG,np.transpose(R3), (2, 0))
G_Tucker = tuck.tensor(G, delta_cross)
if pr != None:
        print('ranks after rounding = %s %s %s' % (G_Tucker.r[0], G_Tucker.r[1], G_Tucker.r[2]))
    # Build the output on a fresh name; reusing 'fun' would shadow the input function.
    result = tuck.tensor()
    result.core = G_Tucker.core
    result.u[0] = np.dot(U1, G_Tucker.u[0])
    result.u[1] = np.dot(U2, G_Tucker.u[1])
    result.u[2] = np.dot(U3, G_Tucker.u[2])
    result.r = G_Tucker.r
    result.n = n
    return result
def schur_comp(A, A11, A12, dtype):
r, r0 = A12.shape
R = r + r0
#print np.linalg.solve(A11.T, A[:,:r].T).T
S_hat = np.zeros((R,r0), dtype=dtype)
S_hat[:r, :] = np.dot(pinv(A11), -A12)#np.linalg.solve(A11, -A12)
S_hat[r:, :] = np.identity(r0)
#print A[:,:]
#uu, ss, vv = np.linalg.svd(np.dot(A, S_hat))
#'ss:', ss
Q, R = np.linalg.qr(np.dot(A, S_hat))
#Q , trash1, trash2 = round_matrix(np.dot(A, S_hat), delta_tucker)
return Q
def mod(X,Y):
return int(X/Y), X%Y
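# e.g. mod(7, 3) == (2, 1): mod unflattens a composite index into (quotient, remainder),
# which is how the callers above recover (k, j) pairs from a flattened column index.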
def maxvol_update(A, ind, dtype):
# finds new r0 good rows
# [ A11 A12]
# [ A21 A22] => S = A22 - A21 A11^(-1) A12
N, R = A.shape
r = len(ind)
r0 = R - r
S_hat = np.zeros((R, r0),dtype=dtype)
S_hat[:r, :] = np.linalg.solve(A[ind, :r], -A[ind, r:])
S_hat[r:, :] = np.identity(r0)
Q, R = np.linalg.qr(np.dot(A, S_hat))
ind_update = tuck.mv.maxvol(Q)
return ind_update
def column_update(UU, u, ind):
S = u - np.dot(UU, u[ind,:])
ind_add = tuck.mv.maxvol(S)
SS = np.dot(pinv(S[ind_add, :].T), S.T).T # WARNING! pinv instead of solve!
#np.linalg.solve(S[ind_add, :].T, S.T).T#np.dot(np.linalg.pinv(S[ind_add, :].T), S.T).T
U1 = UU - np.dot(SS, UU[ind_add])
U2 = SS
return np.concatenate((U1, U2), 1), ind_add
def H(A):
return np.transpose(np.conjugate(A))
def pinv(A):
try:
return np.linalg.pinv(A)
except: #LinAlgError
try:
            print("PINV failed")
return np.linalg.pinv(A + 1e-12*np.linalg.norm(A, 1))
except:
            print("PINV failed twice")
return np.linalg.pinv(A + 1e-8*np.linalg.norm(A, 1))
|
mit
|
ThomasKing2014/foresight
|
foresight/java/next_bits.py
|
4
|
1436
|
""" java.utils.Random utility functions.
These methods simulate the Random object 'next' function, and
are used to implement the other Random functions (nextInt, nextDouble, etc.)
Note that it is an error to pass a value for 'bits' that is larger than 32.
"""
from foresight import lcg
from ctypes import c_uint32
MULTIPLIER = 25214903917
INCREMENT = 11
MODULUS = 2**48
SHIFT_BITS = 16
def predict_state(values):
return lcg.predict_state(values,
MULTIPLIER,
INCREMENT,
MODULUS,
SHIFT_BITS)
def generate_values(state, bits):
gen = lcg.generate_values(state,
MULTIPLIER,
INCREMENT,
MODULUS,
0)
for prediction in gen:
yield prediction >> (48 - bits)
def from_seed(seed, bits):
seed = (seed ^ 0x5DEECE66D) & ((1 << 48) - 1)
gen = lcg.from_seed(seed, MULTIPLIER,
INCREMENT, MODULUS, 0)
for prediction in gen:
yield prediction >> (48 - bits)
def from_outputs(outputs, bits):
outputs = [c_uint32(o).value for o in outputs]
gen = lcg.from_outputs(outputs, MULTIPLIER,
INCREMENT, MODULUS,
48-bits)
for prediction in gen:
yield prediction & ((1 << bits) - 1)
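# Illustrative sanity check (a hypothetical snippet, values not asserted here):
# the outputs of from_seed(42, 32), reinterpreted as signed 32-bit ints, should
# reproduce the stream produced by java.util.Random(42).nextInt().
#
#     from itertools import islice
#     print(list(islice(from_seed(42, 32), 3)))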
|
mit
|
quxiaolong1504/django
|
tests/model_meta/test_legacy.py
|
199
|
7556
|
import warnings
from django import test
from django.contrib.contenttypes.fields import GenericRelation
from django.core.exceptions import FieldDoesNotExist
from django.db.models.fields import CharField, related
from django.utils.deprecation import RemovedInDjango110Warning
from .models import BasePerson, Person
from .results import TEST_RESULTS
class OptionsBaseTests(test.SimpleTestCase):
def _map_related_query_names(self, res):
return tuple((o.field.related_query_name(), m) for o, m in res)
def _map_names(self, res):
return tuple((f.name, m) for f, m in res)
class M2MTests(OptionsBaseTests):
def test_many_to_many_with_model(self):
for model, expected_result in TEST_RESULTS['many_to_many_with_model'].items():
with warnings.catch_warnings(record=True) as warning:
warnings.simplefilter("always")
models = [model for field, model in model._meta.get_m2m_with_model()]
self.assertEqual([RemovedInDjango110Warning], [w.message.__class__ for w in warning])
self.assertEqual(models, expected_result)
@test.ignore_warnings(category=RemovedInDjango110Warning)
class RelatedObjectsTests(OptionsBaseTests):
key_name = lambda self, r: r[0]
def test_related_objects(self):
result_key = 'get_all_related_objects_with_model_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_objects_with_model()
self.assertEqual(self._map_related_query_names(objects), expected)
def test_related_objects_local(self):
result_key = 'get_all_related_objects_with_model_local_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_objects_with_model(local_only=True)
self.assertEqual(self._map_related_query_names(objects), expected)
def test_related_objects_include_hidden(self):
result_key = 'get_all_related_objects_with_model_hidden_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_objects_with_model(include_hidden=True)
self.assertEqual(
sorted(self._map_names(objects), key=self.key_name),
sorted(expected, key=self.key_name)
)
def test_related_objects_include_hidden_local_only(self):
result_key = 'get_all_related_objects_with_model_hidden_local_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_objects_with_model(
include_hidden=True, local_only=True)
self.assertEqual(
sorted(self._map_names(objects), key=self.key_name),
sorted(expected, key=self.key_name)
)
def test_related_objects_proxy(self):
result_key = 'get_all_related_objects_with_model_proxy_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_objects_with_model(
include_proxy_eq=True)
self.assertEqual(self._map_related_query_names(objects), expected)
def test_related_objects_proxy_hidden(self):
result_key = 'get_all_related_objects_with_model_proxy_hidden_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_objects_with_model(
include_proxy_eq=True, include_hidden=True)
self.assertEqual(
sorted(self._map_names(objects), key=self.key_name),
sorted(expected, key=self.key_name)
)
@test.ignore_warnings(category=RemovedInDjango110Warning)
class RelatedM2MTests(OptionsBaseTests):
def test_related_m2m_with_model(self):
result_key = 'get_all_related_many_to_many_with_model_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_m2m_objects_with_model()
self.assertEqual(self._map_related_query_names(objects), expected)
def test_related_m2m_local_only(self):
result_key = 'get_all_related_many_to_many_local_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_many_to_many_objects(local_only=True)
self.assertEqual([o.field.related_query_name() for o in objects], expected)
def test_related_m2m_asymmetrical(self):
m2m = Person._meta.many_to_many
self.assertTrue('following_base' in [f.attname for f in m2m])
related_m2m = Person._meta.get_all_related_many_to_many_objects()
self.assertTrue('followers_base' in [o.field.related_query_name() for o in related_m2m])
def test_related_m2m_symmetrical(self):
m2m = Person._meta.many_to_many
self.assertTrue('friends_base' in [f.attname for f in m2m])
related_m2m = Person._meta.get_all_related_many_to_many_objects()
self.assertIn('friends_inherited_rel_+', [o.field.related_query_name() for o in related_m2m])
@test.ignore_warnings(category=RemovedInDjango110Warning)
class GetFieldByNameTests(OptionsBaseTests):
def test_get_data_field(self):
field_info = Person._meta.get_field_by_name('data_abstract')
self.assertEqual(field_info[1:], (BasePerson, True, False))
self.assertIsInstance(field_info[0], CharField)
def test_get_m2m_field(self):
field_info = Person._meta.get_field_by_name('m2m_base')
self.assertEqual(field_info[1:], (BasePerson, True, True))
self.assertIsInstance(field_info[0], related.ManyToManyField)
def test_get_related_object(self):
field_info = Person._meta.get_field_by_name('relating_baseperson')
self.assertEqual(field_info[1:], (BasePerson, False, False))
self.assertTrue(field_info[0].auto_created)
def test_get_related_m2m(self):
field_info = Person._meta.get_field_by_name('relating_people')
self.assertEqual(field_info[1:], (None, False, True))
self.assertTrue(field_info[0].auto_created)
def test_get_generic_relation(self):
field_info = Person._meta.get_field_by_name('generic_relation_base')
self.assertEqual(field_info[1:], (None, True, False))
self.assertIsInstance(field_info[0], GenericRelation)
def test_get_m2m_field_invalid(self):
with warnings.catch_warnings(record=True) as warning:
warnings.simplefilter("always")
self.assertRaises(
FieldDoesNotExist,
Person._meta.get_field,
**{'field_name': 'm2m_base', 'many_to_many': False}
)
self.assertEqual(Person._meta.get_field('m2m_base', many_to_many=True).name, 'm2m_base')
# 2 RemovedInDjango110Warning messages should be raised, one for each call of get_field()
# with the 'many_to_many' argument.
self.assertEqual(
[RemovedInDjango110Warning, RemovedInDjango110Warning],
[w.message.__class__ for w in warning]
)
@test.ignore_warnings(category=RemovedInDjango110Warning)
class GetAllFieldNamesTestCase(OptionsBaseTests):
def test_get_all_field_names(self):
for model, expected_names in TEST_RESULTS['get_all_field_names'].items():
objects = model._meta.get_all_field_names()
self.assertEqual(sorted(map(str, objects)), sorted(expected_names))
|
bsd-3-clause
|
olinguyen/shogun
|
examples/undocumented/python/classifier_multiclassmachine.py
|
6
|
1152
|
#!/usr/bin/env python
from tools.multiclass_shared import prepare_data
[traindat, label_traindat, testdat, label_testdat] = prepare_data()
parameter_list = [[traindat,testdat,label_traindat,2.1,1,1e-5],[traindat,testdat,label_traindat,2.2,1,1e-5]]
def classifier_multiclassmachine (fm_train_real=traindat,fm_test_real=testdat,label_train_multiclass=label_traindat,width=2.1,C=1,epsilon=1e-5):
from shogun import RealFeatures, MulticlassLabels
from shogun import GaussianKernel
from shogun import LibSVM, KernelMulticlassMachine, MulticlassOneVsRestStrategy
feats_train=RealFeatures(fm_train_real)
feats_test=RealFeatures(fm_test_real)
kernel=GaussianKernel(feats_train, feats_train, width)
labels=MulticlassLabels(label_train_multiclass)
classifier = LibSVM()
classifier.set_epsilon(epsilon)
#print labels.get_labels()
mc_classifier = KernelMulticlassMachine(MulticlassOneVsRestStrategy(),kernel,classifier,labels)
mc_classifier.train()
kernel.init(feats_train, feats_test)
out = mc_classifier.apply().get_labels()
return out
if __name__=='__main__':
print('MulticlassMachine')
classifier_multiclassmachine(*parameter_list[0])
|
gpl-3.0
|
AndrewPeelMV/Blender2.78c
|
2.78/scripts/modules/bpy/utils/__init__.py
|
2
|
19907
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8-80 compliant>
"""
This module contains utility functions specific to blender but
not associated with blenders internal data.
"""
__all__ = (
"blend_paths",
"escape_identifier",
"keyconfig_set",
"load_scripts",
"modules_from_path",
"preset_find",
"preset_paths",
"refresh_script_paths",
"register_class",
"register_module",
"register_manual_map",
"unregister_manual_map",
"make_rna_paths",
"manual_map",
"previews",
"resource_path",
"script_path_user",
"script_path_pref",
"script_paths",
"smpte_from_frame",
"smpte_from_seconds",
"units",
"unregister_class",
"unregister_module",
"user_resource",
)
from _bpy import (
_utils_units as units,
blend_paths,
escape_identifier,
register_class,
resource_path,
script_paths as _bpy_script_paths,
unregister_class,
user_resource as _user_resource,
)
import bpy as _bpy
import os as _os
import sys as _sys
import addon_utils as _addon_utils
_user_preferences = _bpy.context.user_preferences
_script_module_dirs = "startup", "modules"
def _test_import(module_name, loaded_modules):
use_time = _bpy.app.debug_python
if module_name in loaded_modules:
return None
if "." in module_name:
print("Ignoring '%s', can't import files containing "
"multiple periods" % module_name)
return None
if use_time:
import time
t = time.time()
try:
mod = __import__(module_name)
except:
import traceback
traceback.print_exc()
return None
if use_time:
print("time %s %.4f" % (module_name, time.time() - t))
loaded_modules.add(mod.__name__) # should match mod.__name__ too
return mod
def _sys_path_ensure(path):
if path not in _sys.path: # reloading would add twice
_sys.path.insert(0, path)
def modules_from_path(path, loaded_modules):
"""
Load all modules in a path and return them as a list.
:arg path: this path is scanned for scripts and packages.
:type path: string
:arg loaded_modules: already loaded module names, files matching these
names will be ignored.
:type loaded_modules: set
:return: all loaded modules.
:rtype: list
"""
modules = []
for mod_name, mod_path in _bpy.path.module_names(path):
mod = _test_import(mod_name, loaded_modules)
if mod:
modules.append(mod)
return modules
_global_loaded_modules = [] # store loaded module names for reloading.
import bpy_types as _bpy_types # keep for comparisons, never ever reload this.
def load_scripts(reload_scripts=False, refresh_scripts=False):
"""
Load scripts and run each modules register function.
:arg reload_scripts: Causes all scripts to have their unregister method
called before loading.
:type reload_scripts: bool
:arg refresh_scripts: only load scripts which are not already loaded
as modules.
:type refresh_scripts: bool
"""
use_time = _bpy.app.debug_python
if use_time:
import time
t_main = time.time()
loaded_modules = set()
if refresh_scripts:
original_modules = _sys.modules.values()
if reload_scripts:
_bpy_types.TypeMap.clear()
        # Just unload, don't change user defaults; this means we can sync
        # to reload. Note that modules will only actually reload if the
        # modification time changes. This won't work for packages, so
        # it's not perfect.
for module_name in [ext.module for ext in _user_preferences.addons]:
_addon_utils.disable(module_name)
def register_module_call(mod):
register = getattr(mod, "register", None)
if register:
try:
register()
except:
import traceback
traceback.print_exc()
else:
print("\nWarning! '%s' has no register function, "
"this is now a requirement for registerable scripts" %
mod.__file__)
def unregister_module_call(mod):
unregister = getattr(mod, "unregister", None)
if unregister:
try:
unregister()
except:
import traceback
traceback.print_exc()
def test_reload(mod):
import importlib
# reloading this causes internal errors
# because the classes from this module are stored internally
# possibly to refresh internal references too but for now, best not to.
if mod == _bpy_types:
return mod
try:
return importlib.reload(mod)
except:
import traceback
traceback.print_exc()
def test_register(mod):
if refresh_scripts and mod in original_modules:
return
if reload_scripts and mod:
print("Reloading:", mod)
mod = test_reload(mod)
if mod:
register_module_call(mod)
_global_loaded_modules.append(mod.__name__)
if reload_scripts:
# module names -> modules
_global_loaded_modules[:] = [_sys.modules[mod_name]
for mod_name in _global_loaded_modules]
# loop over and unload all scripts
_global_loaded_modules.reverse()
for mod in _global_loaded_modules:
unregister_module_call(mod)
for mod in _global_loaded_modules:
test_reload(mod)
del _global_loaded_modules[:]
from bpy_restrict_state import RestrictBlend
with RestrictBlend():
for base_path in script_paths():
for path_subdir in _script_module_dirs:
path = _os.path.join(base_path, path_subdir)
if _os.path.isdir(path):
_sys_path_ensure(path)
# only add this to sys.modules, don't run
if path_subdir == "modules":
continue
for mod in modules_from_path(path, loaded_modules):
test_register(mod)
# deal with addons separately
_initialize = getattr(_addon_utils, "_initialize", None)
if _initialize is not None:
# first time, use fast-path
_initialize()
del _addon_utils._initialize
else:
_addon_utils.reset_all(reload_scripts=reload_scripts)
del _initialize
# run the active integration preset
filepath = preset_find(_user_preferences.inputs.active_keyconfig,
"keyconfig")
if filepath:
keyconfig_set(filepath)
if reload_scripts:
import gc
print("gc.collect() -> %d" % gc.collect())
if use_time:
print("Python Script Load Time %.4f" % (time.time() - t_main))
# base scripts
_scripts = _os.path.join(_os.path.dirname(__file__),
_os.path.pardir,
_os.path.pardir,
)
_scripts = (_os.path.normpath(_scripts), )
def script_path_user():
"""returns the env var and falls back to home dir or None"""
path = _user_resource('SCRIPTS')
return _os.path.normpath(path) if path else None
def script_path_pref():
"""returns the user preference or None"""
path = _user_preferences.filepaths.script_directory
return _os.path.normpath(path) if path else None
def script_paths(subdir=None, user_pref=True, check_all=False):
"""
Returns a list of valid script paths.
:arg subdir: Optional subdir.
:type subdir: string
:arg user_pref: Include the user preference script path.
:type user_pref: bool
:arg check_all: Include local, user and system paths rather just the paths
blender uses.
:type check_all: bool
:return: script paths.
:rtype: list
"""
scripts = list(_scripts)
# Only paths Blender uses.
#
    # This is needed even when 'check_all' is enabled,
# so the 'BLENDER_SYSTEM_SCRIPTS' environment variable will be used.
base_paths = _bpy_script_paths()
if check_all:
# All possible paths, no duplicates, keep order.
base_paths = (
*(path for path in (_os.path.join(resource_path(res), "scripts")
for res in ('LOCAL', 'USER', 'SYSTEM')) if path not in base_paths),
*base_paths,
)
for path in (*base_paths, script_path_user(), script_path_pref()):
if path:
path = _os.path.normpath(path)
if path not in scripts and _os.path.isdir(path):
scripts.append(path)
if subdir is None:
return scripts
scripts_subdir = []
for path in scripts:
path_subdir = _os.path.join(path, subdir)
if _os.path.isdir(path_subdir):
scripts_subdir.append(path_subdir)
return scripts_subdir
def refresh_script_paths():
"""
Run this after creating new script paths to update sys.path
"""
for base_path in script_paths():
for path_subdir in _script_module_dirs:
path = _os.path.join(base_path, path_subdir)
if _os.path.isdir(path):
_sys_path_ensure(path)
for path in _addon_utils.paths():
_sys_path_ensure(path)
path = _os.path.join(path, "modules")
if _os.path.isdir(path):
_sys_path_ensure(path)
def preset_paths(subdir):
"""
Returns a list of paths for a specific preset.
:arg subdir: preset subdirectory (must not be an absolute path).
:type subdir: string
:return: script paths.
:rtype: list
"""
dirs = []
for path in script_paths("presets", check_all=True):
directory = _os.path.join(path, subdir)
if not directory.startswith(path):
raise Exception("invalid subdir given %r" % subdir)
elif _os.path.isdir(directory):
dirs.append(directory)
# Find addons preset paths
for path in _addon_utils.paths():
directory = _os.path.join(path, "presets", subdir)
if _os.path.isdir(directory):
dirs.append(directory)
return dirs
def smpte_from_seconds(time, fps=None):
"""
Returns an SMPTE formatted string from the *time*:
``HH:MM:SS:FF``.
If the *fps* is not given the current scene is used.
:arg time: time in seconds.
:type time: int, float or ``datetime.timedelta``.
:return: the frame string.
:rtype: string
"""
return smpte_from_frame(time_to_frame(time, fps=fps), fps)
def smpte_from_frame(frame, fps=None, fps_base=None):
"""
Returns an SMPTE formatted string from the *frame*:
``HH:MM:SS:FF``.
If *fps* and *fps_base* are not given the current scene is used.
:arg frame: frame number.
:type frame: int or float.
:return: the frame string.
:rtype: string
"""
if fps is None:
fps = _bpy.context.scene.render.fps
if fps_base is None:
fps_base = _bpy.context.scene.render.fps_base
sign = "-" if frame < 0 else ""
frame = abs(frame * fps_base)
return (
"%s%02d:%02d:%02d:%02d" % (
sign,
int(frame / (3600 * fps)), # HH
int((frame / (60 * fps)) % 60), # MM
int((frame / fps) % 60), # SS
int(frame % fps), # FF
))
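# Worked example for smpte_from_frame: frame 1234 at fps=24, fps_base=1 gives
# "00:00:51:10" (51 whole seconds consume 1224 frames, leaving frame 10).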
def time_from_frame(frame, fps=None, fps_base=None):
"""
    Returns the time from a frame number.
If *fps* and *fps_base* are not given the current scene is used.
:arg frame: number.
:type frame: int or float.
:return: the time in seconds.
:rtype: datetime.timedelta
"""
if fps is None:
fps = _bpy.context.scene.render.fps
if fps_base is None:
fps_base = _bpy.context.scene.render.fps_base
from datetime import timedelta
return timedelta(0, (frame * fps_base) / fps)
def time_to_frame(time, fps=None, fps_base=None):
"""
Returns a float frame number from a time given in seconds or
as a datetime.timedelta object.
If *fps* and *fps_base* are not given the current scene is used.
:arg time: time in seconds.
:type time: number or a ``datetime.timedelta`` object
:return: the frame.
:rtype: float
"""
if fps is None:
fps = _bpy.context.scene.render.fps
if fps_base is None:
fps_base = _bpy.context.scene.render.fps_base
from datetime import timedelta
if isinstance(time, timedelta):
time = time.total_seconds()
return (time / fps_base) * fps
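# Worked example for time_to_frame: time_to_frame(2.0, fps=24, fps_base=1) == 48.0;
# passing datetime.timedelta(seconds=2) yields the same result.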
def preset_find(name, preset_path, display_name=False, ext=".py"):
if not name:
return None
for directory in preset_paths(preset_path):
if display_name:
filename = ""
for fn in _os.listdir(directory):
if fn.endswith(ext) and name == _bpy.path.display_name(fn):
filename = fn
break
else:
filename = name + ext
if filename:
filepath = _os.path.join(directory, filename)
if _os.path.exists(filepath):
return filepath
def keyconfig_set(filepath, report=None):
from os.path import basename, splitext
from itertools import chain
if _bpy.app.debug_python:
print("loading preset:", filepath)
keyconfigs = _bpy.context.window_manager.keyconfigs
keyconfigs_old = keyconfigs[:]
try:
error_msg = ""
with open(filepath, 'r', encoding='utf-8') as keyfile:
exec(compile(keyfile.read(), filepath, "exec"),
{"__file__": filepath})
except:
import traceback
error_msg = traceback.format_exc()
if error_msg:
if report is not None:
report({'ERROR'}, error_msg)
print(error_msg)
kc_new = next(chain(iter(kc for kc in keyconfigs
if kc not in keyconfigs_old), (None,)))
if kc_new is None:
if report is not None:
report({'ERROR'}, "Failed to load keymap %r" % filepath)
return False
else:
kc_new.name = ""
# remove duplicates
name = splitext(basename(filepath))[0]
while True:
kc_dupe = keyconfigs.get(name)
if kc_dupe:
keyconfigs.remove(kc_dupe)
else:
break
kc_new.name = name
keyconfigs.active = kc_new
return True
def user_resource(resource_type, path="", create=False):
"""
Return a user resource path (normally from the users home directory).
    :arg resource_type: Resource type in ['DATAFILES', 'CONFIG', 'SCRIPTS', 'AUTOSAVE'].
    :type resource_type: string
    :arg path: Optional subdirectory.
    :type path: string
    :arg create: Treat the path as a directory and create
       it if it doesn't exist.
:type create: boolean
:return: a path.
:rtype: string
"""
target_path = _user_resource(resource_type, path)
if create:
# should always be true.
if target_path:
# create path if not existing.
if not _os.path.exists(target_path):
try:
_os.makedirs(target_path)
except:
import traceback
traceback.print_exc()
target_path = ""
elif not _os.path.isdir(target_path):
print("Path %r found but isn't a directory!" % target_path)
target_path = ""
return target_path
def _bpy_module_classes(module, is_registered=False):
typemap_list = _bpy_types.TypeMap.get(module, ())
i = 0
while i < len(typemap_list):
cls_weakref = typemap_list[i]
cls = cls_weakref()
if cls is None:
del typemap_list[i]
else:
if is_registered == cls.is_registered:
yield cls
i += 1
def register_module(module, verbose=False):
if verbose:
print("bpy.utils.register_module(%r): ..." % module)
cls = None
for cls in _bpy_module_classes(module, is_registered=False):
if verbose:
print(" %r" % cls)
try:
register_class(cls)
except:
            print("bpy.utils.register_module(): "
                  "failed to register class %r" % cls)
import traceback
traceback.print_exc()
if verbose:
print("done.\n")
if cls is None:
raise Exception("register_module(%r): defines no classes" % module)
def unregister_module(module, verbose=False):
if verbose:
print("bpy.utils.unregister_module(%r): ..." % module)
for cls in _bpy_module_classes(module, is_registered=True):
if verbose:
print(" %r" % cls)
try:
unregister_class(cls)
except:
            print("bpy.utils.unregister_module(): "
                  "failed to unregister class %r" % cls)
import traceback
traceback.print_exc()
if verbose:
print("done.\n")
# -----------------------------------------------------------------------------
# Manual lookups, each function has to return a basepath and a sequence
# of...
# we start with the built-in default mapping
def _blender_default_map():
import rna_manual_reference as ref_mod
ret = (ref_mod.url_manual_prefix, ref_mod.url_manual_mapping)
# avoid storing in memory
del _sys.modules["rna_manual_reference"]
return ret
# hooks for doc lookups
_manual_map = [_blender_default_map]
def register_manual_map(manual_hook):
_manual_map.append(manual_hook)
def unregister_manual_map(manual_hook):
_manual_map.remove(manual_hook)
def manual_map():
# reverse so default is called last
for cb in reversed(_manual_map):
try:
prefix, url_manual_mapping = cb()
except:
print("Error calling %r" % cb)
import traceback
traceback.print_exc()
continue
yield prefix, url_manual_mapping
# Build an RNA path from struct/property/enum names.
def make_rna_paths(struct_name, prop_name, enum_name):
"""
Create RNA "paths" from given names.
:arg struct_name: Name of a RNA struct (like e.g. "Scene").
:type struct_name: string
:arg prop_name: Name of a RNA struct's property.
:type prop_name: string
:arg enum_name: Name of a RNA enum identifier.
:type enum_name: string
:return: A triple of three "RNA paths"
(most_complete_path, "struct.prop", "struct.prop:'enum'").
If no enum_name is given, the third element will always be void.
:rtype: tuple of strings
"""
src = src_rna = src_enum = ""
if struct_name:
if prop_name:
src = src_rna = ".".join((struct_name, prop_name))
if enum_name:
src = src_enum = "%s:'%s'" % (src_rna, enum_name)
else:
src = src_rna = struct_name
return src, src_rna, src_enum
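# Example: make_rna_paths("Scene", "frame_start", "") returns
#     ("Scene.frame_start", "Scene.frame_start", "");
# with enum_name="PRESET" the first and third elements instead become
#     "Scene.frame_start:'PRESET'".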
|
gpl-2.0
|
SteveDiamond/cvxpy
|
cvxpy/atoms/mixed_norm.py
|
3
|
1240
|
"""
Copyright 2013 Steven Diamond
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from cvxpy.expressions.expression import Expression
from cvxpy.atoms.norm import norm
def mixed_norm(X, p=2, q=1):
"""Lp,q norm; :math:`(\\sum_k (\\sum_l \\lvert x_{k,l} \\rvert^p)^{q/p})^{1/q}`.
Parameters
----------
X : Expression or numeric constant
The matrix to take the l_{p,q} norm of.
p : int or str, optional
The type of inner norm.
q : int or str, optional
The type of outer norm.
Returns
-------
Expression
An Expression representing the mixed norm.
"""
X = Expression.cast_to_const(X)
# inner norms
vecnorms = norm(X, p, axis=1)
# outer norm
return norm(vecnorms, q)
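# Illustrative usage (assumes cvxpy's usual Variable API; not part of this file):
#     X = Variable((5, 3))
#     group_lasso = mixed_norm(X, p=2, q=1)   # sum of the 2-norms of the rows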
|
gpl-3.0
|
SpamScope/spamscope
|
src/bolts/json_maker.py
|
1
|
2958
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright 2016 Fedele Mantuano (https://www.linkedin.com/in/fmantuano/)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import, print_function, unicode_literals
from streamparse.bolt import Bolt
from modules import reformat_urls
class JsonMaker(Bolt):
outputs = ['sha256_random', 'json']
def initialize(self, stormconf, context):
self._mails = {}
self.input_bolts = set(context["source->stream->grouping"].keys())
def _compose_output(self, greedy_data):
# # # Tokenizer # # #
mail = greedy_data["tokenizer"][1]
mail["is_filtered"] = greedy_data["tokenizer"][2]
# # # Attachments # # #
# with_attachments: the mail has raw attachments
mail["with_attachments"] = greedy_data["attachments"][1]
attachments = greedy_data["attachments"][2]
if attachments:
mail["attachments"] = attachments
# # # Urls # # #
urls_body = greedy_data["urls"][1].get("body", {})
urls_attachments = greedy_data["urls"][1].get("attachments", {})
if urls_body:
mail.setdefault("urls", {}).update(
{"body": reformat_urls(urls_body)})
if urls_attachments:
mail.setdefault("urls", {}).update(
{"attachments": reformat_urls(urls_attachments)})
# # # Network # # #
network = greedy_data["network"][1]
mail["network"] = {"is_filtered": greedy_data["network"][2]}
if network:
mail["network"].update(network)
# # # Raw mail # # #
raw_mail = greedy_data["raw_mail"][1]
mail["raw_mail"] = {"is_filtered": greedy_data["raw_mail"][2]}
if raw_mail:
mail["raw_mail"].update(raw_mail)
# # # Phishing # # #
phishing = greedy_data["phishing"][1]
if phishing:
mail["phishing"] = phishing
return mail
def process(self, tup):
bolt = tup.component
sha256_random = tup.values[0]
values = tup.values
self._mails.setdefault(sha256_random, {})[bolt] = values
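        # Emit only once every upstream bolt has delivered its part for this
        # mail; the still-missing sources are the set difference computed below.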
diff = self.input_bolts - set(self._mails[sha256_random].keys())
if not diff:
output_json = self._compose_output(self._mails.pop(sha256_random))
self.log("New JSON for mail {!r}".format(sha256_random), "debug")
self.emit([sha256_random, output_json])
|
apache-2.0
|
flightcoin/flightcoin
|
share/qt/extract_strings_qt.py
|
2945
|
1844
|
#!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt4 stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import glob
import operator
OUT_CPP="src/qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
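# Illustrative round-trip: feeding parse_po the two lines
#     msgid "Hello"
#     msgstr ""
# yields [(['"Hello"'], ['""'])] -- msgid/msgstr stay as lists of quoted
# source lines so multi-line strings can be joined verbatim later.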
files = glob.glob('src/*.cpp') + glob.glob('src/*.h')
# xgettext -n --keyword=_ $FILES
child = Popen(['xgettext','--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out)
f = open(OUT_CPP, 'w')
f.write("""#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};')
f.close()
|
mit
|
cryptokoin/geocoinq
|
share/qt/extract_strings_qt.py
|
2945
|
1844
|
#!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt4 stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import glob
import operator
OUT_CPP="src/qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
files = glob.glob('src/*.cpp') + glob.glob('src/*.h')
# xgettext -n --keyword=_ $FILES
child = Popen(['xgettext','--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out)
f = open(OUT_CPP, 'w')
f.write("""#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};')
f.close()
|
mit
|
czgu/metaHack
|
env/lib/python2.7/site-packages/django/template/smartif.py
|
111
|
6276
|
"""
Parser and utilities for the smart 'if' tag
"""
# Using a simple top down parser, as described here:
# http://effbot.org/zone/simple-top-down-parsing.htm.
# 'led' = left denotation
# 'nud' = null denotation
# 'bp' = binding power (left = lbp, right = rbp)
class TokenBase(object):
"""
Base class for operators and literals, mainly for debugging and for throwing
syntax errors.
"""
id = None # node/token type name
value = None # used by literals
first = second = None # used by tree nodes
def nud(self, parser):
# Null denotation - called in prefix context
raise parser.error_class(
"Not expecting '%s' in this position in if tag." % self.id
)
def led(self, left, parser):
# Left denotation - called in infix context
raise parser.error_class(
"Not expecting '%s' as infix operator in if tag." % self.id
)
def display(self):
"""
Returns what to display in error messages for this node
"""
return self.id
def __repr__(self):
out = [str(x) for x in [self.id, self.first, self.second] if x is not None]
return "(" + " ".join(out) + ")"
def infix(bp, func):
"""
Creates an infix operator, given a binding power and a function that
evaluates the node
"""
class Operator(TokenBase):
lbp = bp
def led(self, left, parser):
self.first = left
self.second = parser.expression(bp)
return self
def eval(self, context):
try:
return func(context, self.first, self.second)
except Exception:
# Templates shouldn't throw exceptions when rendering. We are
# most likely to get exceptions for things like {% if foo in bar
# %} where 'bar' does not support 'in', so default to False
return False
return Operator
def prefix(bp, func):
"""
Creates a prefix operator, given a binding power and a function that
evaluates the node.
"""
class Operator(TokenBase):
lbp = bp
def nud(self, parser):
self.first = parser.expression(bp)
self.second = None
return self
def eval(self, context):
try:
return func(context, self.first)
except Exception:
return False
return Operator
# Operator precedence follows Python.
# NB - we can get slightly more accurate syntax error messages by not using the
# same object for '==' and '='.
# We defer variable evaluation to the lambda to ensure that terms are
# lazily evaluated using Python's boolean parsing logic.
OPERATORS = {
'or': infix(6, lambda context, x, y: x.eval(context) or y.eval(context)),
'and': infix(7, lambda context, x, y: x.eval(context) and y.eval(context)),
'not': prefix(8, lambda context, x: not x.eval(context)),
'in': infix(9, lambda context, x, y: x.eval(context) in y.eval(context)),
'not in': infix(9, lambda context, x, y: x.eval(context) not in y.eval(context)),
'=': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)),
'==': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)),
'!=': infix(10, lambda context, x, y: x.eval(context) != y.eval(context)),
'>': infix(10, lambda context, x, y: x.eval(context) > y.eval(context)),
'>=': infix(10, lambda context, x, y: x.eval(context) >= y.eval(context)),
'<': infix(10, lambda context, x, y: x.eval(context) < y.eval(context)),
'<=': infix(10, lambda context, x, y: x.eval(context) <= y.eval(context)),
}
# Assign 'id' to each:
for key, op in OPERATORS.items():
op.id = key
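# Worked example: IfParser(["x", "==", "1", "or", "y"]).parse() yields the tree
# (or (== x 1) y), because '==' (bp 10) binds tighter than 'or' (bp 6).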
class Literal(TokenBase):
"""
A basic self-resolvable object similar to a Django template variable.
"""
# IfParser uses Literal in create_var, but TemplateIfParser overrides
# create_var so that a proper implementation that actually resolves
# variables, filters etc is used.
id = "literal"
lbp = 0
def __init__(self, value):
self.value = value
def display(self):
return repr(self.value)
def nud(self, parser):
return self
def eval(self, context):
return self.value
def __repr__(self):
return "(%s %r)" % (self.id, self.value)
class EndToken(TokenBase):
lbp = 0
def nud(self, parser):
raise parser.error_class("Unexpected end of expression in if tag.")
EndToken = EndToken()
class IfParser(object):
error_class = ValueError
def __init__(self, tokens):
# pre-pass necessary to turn 'not','in' into single token
l = len(tokens)
mapped_tokens = []
i = 0
while i < l:
token = tokens[i]
if token == "not" and i + 1 < l and tokens[i + 1] == "in":
token = "not in"
i += 1 # skip 'in'
mapped_tokens.append(self.translate_token(token))
i += 1
self.tokens = mapped_tokens
self.pos = 0
self.current_token = self.next_token()
def translate_token(self, token):
try:
op = OPERATORS[token]
except (KeyError, TypeError):
return self.create_var(token)
else:
return op()
def next_token(self):
if self.pos >= len(self.tokens):
return EndToken
else:
retval = self.tokens[self.pos]
self.pos += 1
return retval
def parse(self):
retval = self.expression()
# Check that we have exhausted all the tokens
if self.current_token is not EndToken:
raise self.error_class("Unused '%s' at end of if expression." %
self.current_token.display())
return retval
def expression(self, rbp=0):
t = self.current_token
self.current_token = self.next_token()
left = t.nud(self)
while rbp < self.current_token.lbp:
t = self.current_token
self.current_token = self.next_token()
left = t.led(left, self)
return left
def create_var(self, value):
return Literal(value)
|
apache-2.0
|
Centreon-Community/centreon-discovery
|
modPython/MySQL-python-1.2.3/setup_posix.py
|
2
|
3225
|
from ConfigParser import SafeConfigParser
# This dequote() business is required for some older versions
# of mysql_config
def dequote(s):
if s[0] in "\"'" and s[0] == s[-1]:
s = s[1:-1]
return s
def compiler_flag(f):
return "-%s" % f
def mysql_config(what):
from os import popen
f = popen("%s --%s" % (mysql_config.path, what))
data = f.read().strip().split()
ret = f.close()
if ret:
if ret/256:
data = []
if ret/256 > 1:
raise EnvironmentError("%s not found" % (mysql_config.path,))
return data
mysql_config.path = "mysql_config"
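# Illustrative output (depends on the local MySQL install): mysql_config("libs")
# typically returns tokens such as ['-L/usr/lib/mysql', '-lmysqlclient'].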
def get_config():
import os, sys
from setup_common import get_metadata_and_options, enabled, create_release_file
metadata, options = get_metadata_and_options()
if 'mysql_config' in options:
mysql_config.path = options['mysql_config']
extra_objects = []
static = enabled(options, 'static')
if enabled(options, 'embedded'):
libs = mysql_config("libmysqld-libs")
client = "mysqld"
elif enabled(options, 'threadsafe'):
libs = mysql_config("libs_r")
client = "mysqlclient_r"
if not libs:
libs = mysql_config("libs")
client = "mysqlclient"
else:
libs = mysql_config("libs")
client = "mysqlclient"
library_dirs = [ dequote(i[2:]) for i in libs if i.startswith(compiler_flag("L")) ]
libraries = [ dequote(i[2:]) for i in libs if i.startswith(compiler_flag("l")) ]
removable_compile_args = [ compiler_flag(f) for f in "ILl" ]
extra_compile_args = [ i.replace("%", "%%") for i in mysql_config("cflags")
if i[:2] not in removable_compile_args ]
# Copy the arch flags for linking as well
extra_link_args = list()
for i in range(len(extra_compile_args)):
if extra_compile_args[i] == '-arch':
extra_link_args += ['-arch', extra_compile_args[i + 1]]
include_dirs = [ dequote(i[2:])
for i in mysql_config('include')
if i.startswith(compiler_flag('I')) ]
if not include_dirs: # fix for MySQL-3.23
include_dirs = [ dequote(i[2:])
for i in mysql_config('cflags')
if i.startswith(compiler_flag('I')) ]
if static:
extra_objects.append(os.path.join(
library_dirs[0],'lib%s.a' % client))
name = "MySQL-python"
if enabled(options, 'embedded'):
name = name + "-embedded"
metadata['name'] = name
define_macros = [
('version_info', metadata['version_info']),
('__version__', metadata['version']),
]
create_release_file(metadata)
del metadata['version_info']
ext_options = dict(
name = "_mysql",
library_dirs = library_dirs,
libraries = libraries,
extra_compile_args = extra_compile_args,
extra_link_args = extra_link_args,
include_dirs = include_dirs,
extra_objects = extra_objects,
define_macros = define_macros,
)
return metadata, ext_options
if __name__ == "__main__":
print """You shouldn't be running this directly; it is used by setup.py."""
|
gpl-2.0
|
suto/infernal-twin
|
build/pip/pip/pep425tags.py
|
249
|
4427
|
"""Generate and work with PEP 425 Compatibility Tags."""
from __future__ import absolute_import
import re
import sys
import warnings
try:
import sysconfig
except ImportError: # pragma nocover
# Python < 2.7
import distutils.sysconfig as sysconfig
import distutils.util
_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
def get_abbr_impl():
"""Return abbreviated implementation name."""
if hasattr(sys, 'pypy_version_info'):
pyimpl = 'pp'
elif sys.platform.startswith('java'):
pyimpl = 'jy'
elif sys.platform == 'cli':
pyimpl = 'ip'
else:
pyimpl = 'cp'
return pyimpl
def get_impl_ver():
"""Return implementation version."""
return ''.join(map(str, sys.version_info[:2]))
def get_platform():
"""Return our platform name 'win32', 'linux_x86_64'"""
# XXX remove distutils dependency
return distutils.util.get_platform().replace('.', '_').replace('-', '_')
def get_supported(versions=None, noarch=False):
"""Return a list of supported tags for each version specified in
`versions`.
:param versions: a list of string versions, of the form ["33", "32"],
or None. The first version will be assumed to support our ABI.
"""
supported = []
# Versions must be given with respect to the preference
if versions is None:
versions = []
major = sys.version_info[0]
# Support all previous minor Python versions.
for minor in range(sys.version_info[1], -1, -1):
versions.append(''.join(map(str, (major, minor))))
impl = get_abbr_impl()
abis = []
try:
soabi = sysconfig.get_config_var('SOABI')
except IOError as e: # Issue #1074
warnings.warn("{0}".format(e), RuntimeWarning)
soabi = None
if soabi and soabi.startswith('cpython-'):
abis[0:0] = ['cp' + soabi.split('-')[1]]
abi3s = set()
import imp
for suffix in imp.get_suffixes():
if suffix[0].startswith('.abi'):
abi3s.add(suffix[0].split('.', 2)[1])
abis.extend(sorted(list(abi3s)))
abis.append('none')
if not noarch:
arch = get_platform()
if sys.platform == 'darwin':
# support macosx-10.6-intel on macosx-10.9-x86_64
match = _osx_arch_pat.match(arch)
if match:
name, major, minor, actual_arch = match.groups()
actual_arches = [actual_arch]
if actual_arch in ('i386', 'ppc'):
actual_arches.append('fat')
if actual_arch in ('i386', 'x86_64'):
actual_arches.append('intel')
if actual_arch in ('i386', 'ppc', 'x86_64'):
actual_arches.append('fat3')
if actual_arch in ('ppc64', 'x86_64'):
actual_arches.append('fat64')
if actual_arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
actual_arches.append('universal')
tpl = '{0}_{1}_%i_%s'.format(name, major)
arches = []
for m in range(int(minor) + 1):
for a in actual_arches:
arches.append(tpl % (m, a))
else:
# arch pattern didn't match (?!)
arches = [arch]
else:
arches = [arch]
# Current version, current API (built specifically for our Python):
for abi in abis:
for arch in arches:
supported.append(('%s%s' % (impl, versions[0]), abi, arch))
# Has binaries, does not use the Python API:
supported.append(('py%s' % (versions[0][0]), 'none', arch))
# No abi / arch, but requires our implementation:
for i, version in enumerate(versions):
supported.append(('%s%s' % (impl, version), 'none', 'any'))
if i == 0:
# Tagged specifically as being cross-version compatible
# (with just the major version specified)
supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
# No abi / arch, generic Python
for i, version in enumerate(versions):
supported.append(('py%s' % (version,), 'none', 'any'))
if i == 0:
supported.append(('py%s' % (version[0]), 'none', 'any'))
return supported
supported_tags = get_supported()
supported_tags_noarch = get_supported(noarch=True)
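# Illustrative result (platform-dependent): on CPython 2.7 under 64-bit Linux,
# supported_tags starts with entries like ('cp27', 'none', 'linux_x86_64').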
|
gpl-3.0
|
Abhayakara/dnspython
|
dns/rdtypes/txtbase.py
|
8
|
2918
|
# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""TXT-like base class."""
import dns.exception
import dns.rdata
import dns.tokenizer
class TXTBase(dns.rdata.Rdata):
"""Base class for rdata that is like a TXT record
@ivar strings: the text strings
@type strings: list of string
@see: RFC 1035"""
__slots__ = ['strings']
def __init__(self, rdclass, rdtype, strings):
super(TXTBase, self).__init__(rdclass, rdtype)
if isinstance(strings, str):
strings = [ strings ]
self.strings = strings[:]
def to_text(self, origin=None, relativize=True, **kw):
txt = ''
prefix = ''
for s in self.strings:
txt += '%s"%s"' % (prefix, dns.rdata._escapify(s))
prefix = ' '
return txt
def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
strings = []
while 1:
token = tok.get().unescape()
if token.is_eol_or_eof():
break
if not (token.is_quoted_string() or token.is_identifier()):
raise dns.exception.SyntaxError("expected a string")
if len(token.value) > 255:
raise dns.exception.SyntaxError("string too long")
strings.append(token.value)
if len(strings) == 0:
raise dns.exception.UnexpectedEnd
return cls(rdclass, rdtype, strings)
from_text = classmethod(from_text)
def to_wire(self, file, compress = None, origin = None):
for s in self.strings:
l = len(s)
assert l < 256
byte = chr(l)
file.write(byte)
file.write(s)
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
strings = []
while rdlen > 0:
l = ord(wire[current])
current += 1
rdlen -= 1
if l > rdlen:
raise dns.exception.FormError
s = wire[current : current + l].unwrap()
current += l
rdlen -= l
strings.append(s)
return cls(rdclass, rdtype, strings)
from_wire = classmethod(from_wire)
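# Example (a sketch): concrete TXT-like rdata types are trivial subclasses of
# TXTBase; dnspython's own TXT and SPF classes follow this pattern, inheriting
# the text, wire and parsing logic above unchanged.
class _ExampleTXT(TXTBase):
    """A TXT-like record type, defined purely for illustration."""
    pass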
|
isc
|
yaoice/dzhops
|
managekeys/utils.py
|
1
|
1677
|
# -*- coding: utf-8 -*-
from hostlist.models import HostList, Dzhuser
import logging
log = logging.getLogger('opsmaster')
def clearUpMinionKyes(idlist, dc, eg):
    '''
    Filter the given minion ids, returning those that match the requested
    data center and engineer.
    :param idlist: minion ids in one key state: acp/pre/rej
        (accepted, pending and rejected, respectively)
    :param dc: data center abbreviation, or 'DC_ALL' for all data centers
    :param eg: engineer username, or 'EG_ALL' for all engineers
    :return: list of the filtered minion ids
    '''
if dc == 'DC_ALL' and eg == 'EG_ALL':
result = idlist
elif dc != 'DC_ALL' and eg == 'EG_ALL':
result = []
for id in idlist:
id_dcen = id.split("_")
if id_dcen[3] == dc:
result.append(id)
elif dc == 'DC_ALL' and eg != 'EG_ALL':
eg_id_list = []
engi_result = Dzhuser.objects.get(username=eg)
data = HostList.objects.filter(engineer=engi_result.engineer)
for row in data:
eg_id_list.append(row.minionid)
result = list(set(idlist).intersection(set(eg_id_list)))
elif dc != 'DC_ALL' and eg != 'EG_ALL':
dc_id_list = []
eg_id_list = []
for id in idlist:
id_dcen = id.split("_")
if id_dcen[3] == dc:
dc_id_list.append(id)
engi_result = Dzhuser.objects.get(username=eg)
data = HostList.objects.filter(engineer=engi_result.engineer)
for row in data:
eg_id_list.append(row.minionid)
result = list(set(dc_id_list).intersection(set(eg_id_list)))
else:
result = []
log.error("Unexpected execution here.")
return result
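# Usage sketch (hypothetical minion ids; assumes the "<biz>_<svc>_<num>_<dc>"
# naming convention implied by id.split("_")[3] above, plus a configured
# Django environment for the engineer branches):
#
#   accepted = ['web_app_01_bj', 'web_app_02_sh', 'db_mysql_01_bj']
#   clearUpMinionKyes(accepted, 'bj', 'EG_ALL')   # -> the ids ending in '_bj'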
|
apache-2.0
|
talexop/talexop_kernel_i9505_4.3
|
tools/perf/scripts/python/sched-migration.py
|
11215
|
11670
|
#!/usr/bin/python
#
# Cpu task migration overview toy
#
# Copyright (C) 2010 Frederic Weisbecker <[email protected]>
#
# perf script event handlers have been generated by perf script -g python
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import os
import sys
from collections import defaultdict
from UserList import UserList
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from SchedGui import *
threads = { 0 : "idle"}
def thread_name(pid):
return "%s:%d" % (threads[pid], pid)
class RunqueueEventUnknown:
@staticmethod
def color():
return None
def __repr__(self):
return "unknown"
class RunqueueEventSleep:
@staticmethod
def color():
return (0, 0, 0xff)
def __init__(self, sleeper):
self.sleeper = sleeper
def __repr__(self):
return "%s gone to sleep" % thread_name(self.sleeper)
class RunqueueEventWakeup:
@staticmethod
def color():
return (0xff, 0xff, 0)
def __init__(self, wakee):
self.wakee = wakee
def __repr__(self):
return "%s woke up" % thread_name(self.wakee)
class RunqueueEventFork:
@staticmethod
def color():
return (0, 0xff, 0)
def __init__(self, child):
self.child = child
def __repr__(self):
return "new forked task %s" % thread_name(self.child)
class RunqueueMigrateIn:
@staticmethod
def color():
return (0, 0xf0, 0xff)
def __init__(self, new):
self.new = new
def __repr__(self):
return "task migrated in %s" % thread_name(self.new)
class RunqueueMigrateOut:
@staticmethod
def color():
return (0xff, 0, 0xff)
def __init__(self, old):
self.old = old
def __repr__(self):
return "task migrated out %s" % thread_name(self.old)
class RunqueueSnapshot:
def __init__(self, tasks = [0], event = RunqueueEventUnknown()):
self.tasks = tuple(tasks)
self.event = event
def sched_switch(self, prev, prev_state, next):
event = RunqueueEventUnknown()
if taskState(prev_state) == "R" and next in self.tasks \
and prev in self.tasks:
return self
if taskState(prev_state) != "R":
event = RunqueueEventSleep(prev)
next_tasks = list(self.tasks[:])
if prev in self.tasks:
if taskState(prev_state) != "R":
next_tasks.remove(prev)
elif taskState(prev_state) == "R":
next_tasks.append(prev)
if next not in next_tasks:
next_tasks.append(next)
return RunqueueSnapshot(next_tasks, event)
def migrate_out(self, old):
if old not in self.tasks:
return self
next_tasks = [task for task in self.tasks if task != old]
return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old))
def __migrate_in(self, new, event):
if new in self.tasks:
self.event = event
return self
next_tasks = self.tasks[:] + tuple([new])
return RunqueueSnapshot(next_tasks, event)
def migrate_in(self, new):
return self.__migrate_in(new, RunqueueMigrateIn(new))
def wake_up(self, new):
return self.__migrate_in(new, RunqueueEventWakeup(new))
def wake_up_new(self, new):
return self.__migrate_in(new, RunqueueEventFork(new))
def load(self):
""" Provide the number of tasks on the runqueue.
Don't count idle"""
return len(self.tasks) - 1
	def __repr__(self):
		# origin_tostring() is not defined anywhere; the repr is just
		# the task tuple.
		return self.tasks.__repr__()
class TimeSlice:
def __init__(self, start, prev):
self.start = start
self.prev = prev
self.end = start
# cpus that triggered the event
self.event_cpus = []
if prev is not None:
self.total_load = prev.total_load
self.rqs = prev.rqs.copy()
else:
self.rqs = defaultdict(RunqueueSnapshot)
self.total_load = 0
def __update_total_load(self, old_rq, new_rq):
diff = new_rq.load() - old_rq.load()
self.total_load += diff
def sched_switch(self, ts_list, prev, prev_state, next, cpu):
old_rq = self.prev.rqs[cpu]
new_rq = old_rq.sched_switch(prev, prev_state, next)
if old_rq is new_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def migrate(self, ts_list, new, old_cpu, new_cpu):
if old_cpu == new_cpu:
return
old_rq = self.prev.rqs[old_cpu]
out_rq = old_rq.migrate_out(new)
self.rqs[old_cpu] = out_rq
self.__update_total_load(old_rq, out_rq)
new_rq = self.prev.rqs[new_cpu]
in_rq = new_rq.migrate_in(new)
self.rqs[new_cpu] = in_rq
self.__update_total_load(new_rq, in_rq)
ts_list.append(self)
if old_rq is not out_rq:
self.event_cpus.append(old_cpu)
self.event_cpus.append(new_cpu)
def wake_up(self, ts_list, pid, cpu, fork):
old_rq = self.prev.rqs[cpu]
if fork:
new_rq = old_rq.wake_up_new(pid)
else:
new_rq = old_rq.wake_up(pid)
if new_rq is old_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def next(self, t):
self.end = t
return TimeSlice(t, self)
class TimeSliceList(UserList):
	def __init__(self, arg = None):
		# avoid a shared mutable default argument
		self.data = arg if arg is not None else []
def get_time_slice(self, ts):
if len(self.data) == 0:
slice = TimeSlice(ts, TimeSlice(-1, None))
else:
slice = self.data[-1].next(ts)
return slice
def find_time_slice(self, ts):
start = 0
end = len(self.data)
found = -1
searching = True
while searching:
if start == end or start == end - 1:
searching = False
i = (end + start) / 2
if self.data[i].start <= ts and self.data[i].end >= ts:
found = i
end = i
continue
if self.data[i].end < ts:
start = i
elif self.data[i].start > ts:
end = i
return found
def set_root_win(self, win):
self.root_win = win
def mouse_down(self, cpu, t):
idx = self.find_time_slice(t)
if idx == -1:
return
ts = self[idx]
rq = ts.rqs[cpu]
raw = "CPU: %d\n" % cpu
raw += "Last event : %s\n" % rq.event.__repr__()
raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000)
raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6))
raw += "Load = %d\n" % rq.load()
for t in rq.tasks:
raw += "%s \n" % thread_name(t)
self.root_win.update_summary(raw)
def update_rectangle_cpu(self, slice, cpu):
rq = slice.rqs[cpu]
if slice.total_load != 0:
load_rate = rq.load() / float(slice.total_load)
else:
load_rate = 0
red_power = int(0xff - (0xff * load_rate))
color = (0xff, red_power, red_power)
top_color = None
if cpu in slice.event_cpus:
top_color = rq.event.color()
self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end)
def fill_zone(self, start, end):
i = self.find_time_slice(start)
if i == -1:
return
for i in xrange(i, len(self.data)):
timeslice = self.data[i]
if timeslice.start > end:
return
for cpu in timeslice.rqs:
self.update_rectangle_cpu(timeslice, cpu)
def interval(self):
if len(self.data) == 0:
return (0, 0)
return (self.data[0].start, self.data[-1].end)
def nr_rectangles(self):
last_ts = self.data[-1]
max_cpu = 0
for cpu in last_ts.rqs:
if cpu > max_cpu:
max_cpu = cpu
return max_cpu
class SchedEventProxy:
def __init__(self):
self.current_tsk = defaultdict(lambda : -1)
self.timeslices = TimeSliceList()
def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
""" Ensure the task we sched out this cpu is really the one
we logged. Otherwise we may have missed traces """
on_cpu_task = self.current_tsk[headers.cpu]
if on_cpu_task != -1 and on_cpu_task != prev_pid:
print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \
(headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid)
threads[prev_pid] = prev_comm
threads[next_pid] = next_comm
self.current_tsk[headers.cpu] = next_pid
ts = self.timeslices.get_time_slice(headers.ts())
ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu)
def migrate(self, headers, pid, prio, orig_cpu, dest_cpu):
ts = self.timeslices.get_time_slice(headers.ts())
ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)
def wake_up(self, headers, comm, pid, success, target_cpu, fork):
if success == 0:
return
ts = self.timeslices.get_time_slice(headers.ts())
ts.wake_up(self.timeslices, pid, target_cpu, fork)
def trace_begin():
global parser
parser = SchedEventProxy()
def trace_end():
app = wx.App(False)
timeslices = parser.timeslices
frame = RootFrame(timeslices, "Migration")
app.MainLoop()
def sched__sched_stat_runtime(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, runtime, vruntime):
pass
def sched__sched_stat_iowait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_sleep(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_process_fork(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
parent_comm, parent_pid, child_comm, child_pid):
pass
def sched__sched_process_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_free(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_migrate_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, orig_cpu,
dest_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)
def sched__sched_switch(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio)
def sched__sched_wakeup_new(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 1)
def sched__sched_wakeup(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 0)
def sched__sched_wait_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_kthread_stop_ret(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
ret):
pass
def sched__sched_kthread_stop(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid):
pass
def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
pass
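# Usage (the standard perf scripting workflow; the event list is illustrative):
#
#   perf record -a -e 'sched:*' sleep 10
#   perf script -s tools/perf/scripts/python/sched-migration.py
#
# The second command replays the recorded trace through the handlers above
# and opens the wxPython window built in trace_end().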
|
gpl-2.0
|
CloverHealth/airflow
|
airflow/contrib/operators/sftp_operator.py
|
4
|
4232
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.contrib.hooks.ssh_hook import SSHHook
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class SFTPOperation(object):
PUT = 'put'
GET = 'get'
class SFTPOperator(BaseOperator):
"""
    SFTPOperator for transferring files from a remote host to the local host
    or vice versa. This operator uses ssh_hook to open an sftp transport
    channel that serves as the basis for the file transfer.
:param ssh_hook: predefined ssh_hook to use for remote execution
:type ssh_hook: :class:`SSHHook`
:param ssh_conn_id: connection id from airflow Connections
:type ssh_conn_id: str
:param remote_host: remote host to connect
:type remote_host: str
:param local_filepath: local file path to get or put. (templated)
:type local_filepath: str
:param remote_filepath: remote file path to get or put. (templated)
:type remote_filepath: str
    :param operation: specify operation 'get' or 'put', defaults to put
    :type operation: str
"""
template_fields = ('local_filepath', 'remote_filepath')
@apply_defaults
def __init__(self,
ssh_hook=None,
ssh_conn_id=None,
remote_host=None,
local_filepath=None,
remote_filepath=None,
operation=SFTPOperation.PUT,
*args,
**kwargs):
super(SFTPOperator, self).__init__(*args, **kwargs)
self.ssh_hook = ssh_hook
self.ssh_conn_id = ssh_conn_id
self.remote_host = remote_host
self.local_filepath = local_filepath
self.remote_filepath = remote_filepath
self.operation = operation
if not (self.operation.lower() == SFTPOperation.GET or
self.operation.lower() == SFTPOperation.PUT):
raise TypeError("unsupported operation value {0}, expected {1} or {2}"
.format(self.operation, SFTPOperation.GET, SFTPOperation.PUT))
def execute(self, context):
file_msg = None
try:
if self.ssh_conn_id and not self.ssh_hook:
self.ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id)
if not self.ssh_hook:
raise AirflowException("can not operate without ssh_hook or ssh_conn_id")
if self.remote_host is not None:
self.ssh_hook.remote_host = self.remote_host
ssh_client = self.ssh_hook.get_conn()
sftp_client = ssh_client.open_sftp()
if self.operation.lower() == SFTPOperation.GET:
file_msg = "from {0} to {1}".format(self.remote_filepath,
self.local_filepath)
self.log.debug("Starting to transfer %s", file_msg)
sftp_client.get(self.remote_filepath, self.local_filepath)
else:
file_msg = "from {0} to {1}".format(self.local_filepath,
self.remote_filepath)
self.log.debug("Starting to transfer file %s", file_msg)
sftp_client.put(self.local_filepath, self.remote_filepath)
except Exception as e:
raise AirflowException("Error while transferring {0}, error: {1}"
.format(file_msg, str(e)))
return None
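# Usage sketch (the connection id and paths are illustrative; assumes an
# Airflow connection named "ssh_default" is configured):
#
#   upload = SFTPOperator(
#       task_id='upload_report',
#       ssh_conn_id='ssh_default',
#       local_filepath='/tmp/report.csv',
#       remote_filepath='/data/incoming/report.csv',
#       operation=SFTPOperation.PUT,
#       dag=dag)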
|
apache-2.0
|
ishay2b/tensorflow
|
tensorflow/contrib/fused_conv/__init__.py
|
87
|
1113
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops and modules related to fused_conv2d_bias_activation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=wildcard-import
from tensorflow.contrib.fused_conv.python.ops.fused_conv2d_bias_activation_op import *
from tensorflow.python.util.all_util import remove_undocumented
remove_undocumented(__name__, ['fused_conv2d_bias_activation'])
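# Usage sketch (argument names paraphrased from the op's documentation and
# should be treated as assumptions; shapes and values are illustrative):
#
#   y = fused_conv2d_bias_activation(
#       conv_input=x, filter=w, bias=b,
#       strides=[1, 1, 1, 1], padding='SAME',
#       activation_mode='Relu')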
|
apache-2.0
|
ericjang/cryptocurrency_arbitrage
|
BTER.py
|
2
|
4991
|
from Exchange import Exchange
import bterapi
import os
from order import Order
class BTER(Exchange):
def __init__(self, keyfile):
keyfile = os.path.abspath(keyfile)
self.keyhandler = bterapi.KeyHandler(keyfile)
key = self.keyhandler.getKeys()[0]
self.conn = bterapi.BTERConnection()
self.api = bterapi.TradeAPI(key, self.keyhandler)
super(BTER, self).__init__()
self.name = 'BTER'
self.trading_fee = 0.002
def get_tradeable_pairs(self):
tradeable_pairs = []
for pair in bterapi.all_pairs:
a, b = pair.split("_")
tradeable_pairs.append((a.upper(), b.upper()))
return tradeable_pairs
def get_depth(self, base, alt):
book = {'bids': [], 'asks': []}
pair, swapped = self.get_validated_pair((base, alt))
if pair is None:
return
pairstr = pair[0].lower() + "_" + pair[1].lower()
asks, bids = bterapi.getDepth(pairstr)
book['bids'] = [Order(float(b[0]), float(b[1])) for b in bids]
book['asks'] = [Order(float(a[0]), float(a[1])) for a in asks]
return book
def get_balance(self, currency):
funds = self.api.getFunds(self.conn, error_handler=None)
if currency in funds:
return float(funds[currency])
else:
return 0.0
# data = self.api.getInfo(connection = self.conn)
#return getattr(data, 'balance_' + currency.lower())
def get_all_balances(self):
funds = self.api.getFunds(self.conn, error_handler=None)
return {k:float(v) for k,v in funds.items()}
def submit_order(self, gc, gv, rc, rv):
return NotImplemented
# pair, swapped = self.get_validated_pair((rc, gc))
# print swapped
# if pair is None:
# return
# pairstr = pair[0].lower() + "_" + pair[1].lower()
# if swapped:
# price = rv/gv
# self.api.trade(pairstr, 'sell', price, gv)
# else:
# price = gv/rv
# self.api.trade(pairstr, 'buy', price, rv)
def confirm_order(self, orderID):
pass
# from Exchange import Exchange
# from PairList import PairList
# import bterapi
#
# class BTER(Exchange):
# """docstring for BTER"""
# def __init__(self, mode, keyfile):
# super(BTER, self).__init__(mode)
# self.name = 'BTER'
# self.full_orderbook = False
# self.trading_fee = 0.002
# # bterapi already kindly provides us with a list of supported pairs
# for pair in bterapi.all_pairs:
# a, b = pair.split("_")
# self.supported_pairs.add_pair((a.upper(), b.upper()))
# # set up API
# self.keyhandler = bterapi.KeyHandler(keyfile)
# key = self.keyhandler.getKeys()[0]
# self.conn = bterapi.BTERConnection()
# self.api = bterapi.TradeAPI(key, self.keyhandler)
#
# def live_order(self, recv_currency, recv_volume, give_currency, give_volume):
#
# return NotImplemented
#
# def update_live_balance(self, currency=None):
# data = self.api.getFunds(self.conn, error_handler=None)
# foo = 1
# return NotImplemented
#
# def update_orders(self, pair):
# a, b = pair
# pairstr = a.lower() + "_" + b.lower()
# pairstr, flipped = self.check_swapped(pairstr)
# if flipped:
# alt, base = pair
# else:
# base, alt = pair
# # base, alt not necessarily the same as one dictated in ArbitrageBot class
# asks, bids = bterapi.getDepth(pairstr)
# for bid in bids:
# rate = float(bid[0])
# give_vol = float(bid[1])
# recv_vol = give_vol * rate
# order = {"give_currency" : base, \
# "recv_currency" : alt, \
# "give_volume" : give_vol, \
# "recv_volume" : recv_vol }
# self.orderbook.set_order(order)
# for ask in asks:
# rate = float(ask[0])
# recv_vol = float(ask[1])
# give_vol = recv_vol * rate
# order = {"give_currency" : alt, \
# "recv_currency" : base, \
# "give_volume" : give_vol, \
# "recv_volume" : recv_vol }
# self.orderbook.set_order(order)
#
#
# def check_swapped(self, pairstr):
# '''
# returns swapped_pair, True if pair has been swapped
# else returns pairstr, False
# '''
# if pairstr not in bterapi.all_pairs:
# if "_" in pairstr:
# a, b = pairstr.split("_")
# swapped_pair = "%s_%s" % (b.lower(), a.lower())
# if swapped_pair in bterapi.all_pairs:
# return swapped_pair, True
# msg = "unrecognized pair " + pairstr
# raise Exception(msg)
# return pairstr, False
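# Usage sketch (requires a bterapi key file on disk; the path, pair and
# currency below are illustrative):
#
#   bter = BTER('keys/bter.key')
#   book = bter.get_depth('BTC', 'CNY')
#   if book:
#       best_bid = book['bids'][0]
#   print bter.get_balance('BTC')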
|
gpl-3.0
|
Kegbot/kegbot-server
|
pykeg/backup/postgres.py
|
1
|
1907
|
"""Postgres-specific database backup/restore implementation."""
import logging
import os
import subprocess
from django.conf import settings
logger = logging.getLogger(__name__)
DEFAULT_DB = "default"
# Common command-line arguments
PARAMS = {
"db": settings.DATABASES[DEFAULT_DB].get("NAME"),
"user": settings.DATABASES[DEFAULT_DB].get("USER"),
"password": settings.DATABASES[DEFAULT_DB].get("PASSWORD"),
"host": settings.DATABASES[DEFAULT_DB].get("HOST"),
"port": settings.DATABASES[DEFAULT_DB].get("PORT"),
}
DEFAULT_ARGS = []
if PARAMS.get("user"):
DEFAULT_ARGS.append("--username={}".format(PARAMS["user"]))
if PARAMS.get("host"):
DEFAULT_ARGS.append("--host={}".format(PARAMS["host"]))
if PARAMS.get("port"):
DEFAULT_ARGS.append("--port={}".format(PARAMS["port"]))
DEFAULT_ENV = dict(os.environ)
if PARAMS.get("password"):
DEFAULT_ENV["PGPASSWORD"] = PARAMS["password"]
def engine_name():
return "postgres"
def is_installed():
args = ["psql"] + DEFAULT_ARGS
args += ["-qt", "-c \"select * from pg_tables where schemaname='public';\"", PARAMS["db"]]
cmd = " ".join(args)
logger.info(cmd)
    # check_output returns bytes on Python 3; decode before the substring test
    output = subprocess.check_output(cmd, env=DEFAULT_ENV, shell=True).decode("utf-8")
return "core_" in output
def dump(output_fd):
args = ["pg_dump"] + DEFAULT_ARGS
args.append(PARAMS["db"])
cmd = " ".join(args)
logger.info(cmd)
return subprocess.check_call(cmd, stdout=output_fd, env=DEFAULT_ENV, shell=True)
def restore(input_fd):
args = ["psql"] + DEFAULT_ARGS
args.append(PARAMS["db"])
cmd = " ".join(args)
logger.info(cmd)
    # pass DEFAULT_ENV so PGPASSWORD reaches psql, matching dump()
    return subprocess.check_call(cmd, stdin=input_fd, env=DEFAULT_ENV, shell=True)
def erase():
args = ["psql"] + DEFAULT_ARGS
args += [PARAMS["db"], "-c 'drop schema public cascade; create schema public;'"]
cmd = " ".join(args)
logger.info(cmd)
    subprocess.check_call(cmd, env=DEFAULT_ENV, shell=True)
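# Usage sketch (path illustrative; connection details come from the Django
# settings consumed into PARAMS above):
#
#   with open('/tmp/kegbot.sql', 'w') as out:
#       dump(out)        # pg_dump the configured database into the file
#   with open('/tmp/kegbot.sql') as src:
#       restore(src)     # feed the dump back through psql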
|
gpl-2.0
|
markrawlingson/SickRage
|
lib/unidecode/x052.py
|
253
|
4654
|
data = (
'Dao ', # 0x00
'Diao ', # 0x01
'Dao ', # 0x02
'Ren ', # 0x03
'Ren ', # 0x04
'Chuang ', # 0x05
'Fen ', # 0x06
'Qie ', # 0x07
'Yi ', # 0x08
'Ji ', # 0x09
'Kan ', # 0x0a
'Qian ', # 0x0b
'Cun ', # 0x0c
'Chu ', # 0x0d
'Wen ', # 0x0e
'Ji ', # 0x0f
'Dan ', # 0x10
'Xing ', # 0x11
'Hua ', # 0x12
'Wan ', # 0x13
'Jue ', # 0x14
'Li ', # 0x15
'Yue ', # 0x16
'Lie ', # 0x17
'Liu ', # 0x18
'Ze ', # 0x19
'Gang ', # 0x1a
'Chuang ', # 0x1b
'Fu ', # 0x1c
'Chu ', # 0x1d
'Qu ', # 0x1e
'Ju ', # 0x1f
'Shan ', # 0x20
'Min ', # 0x21
'Ling ', # 0x22
'Zhong ', # 0x23
'Pan ', # 0x24
'Bie ', # 0x25
'Jie ', # 0x26
'Jie ', # 0x27
'Bao ', # 0x28
'Li ', # 0x29
'Shan ', # 0x2a
'Bie ', # 0x2b
'Chan ', # 0x2c
'Jing ', # 0x2d
'Gua ', # 0x2e
'Gen ', # 0x2f
'Dao ', # 0x30
'Chuang ', # 0x31
'Kui ', # 0x32
'Ku ', # 0x33
'Duo ', # 0x34
'Er ', # 0x35
'Zhi ', # 0x36
'Shua ', # 0x37
'Quan ', # 0x38
'Cha ', # 0x39
'Ci ', # 0x3a
'Ke ', # 0x3b
'Jie ', # 0x3c
'Gui ', # 0x3d
'Ci ', # 0x3e
'Gui ', # 0x3f
'Kai ', # 0x40
'Duo ', # 0x41
'Ji ', # 0x42
'Ti ', # 0x43
'Jing ', # 0x44
'Lou ', # 0x45
'Gen ', # 0x46
'Ze ', # 0x47
'Yuan ', # 0x48
'Cuo ', # 0x49
'Xue ', # 0x4a
'Ke ', # 0x4b
'La ', # 0x4c
'Qian ', # 0x4d
'Cha ', # 0x4e
'Chuang ', # 0x4f
'Gua ', # 0x50
'Jian ', # 0x51
'Cuo ', # 0x52
'Li ', # 0x53
'Ti ', # 0x54
'Fei ', # 0x55
'Pou ', # 0x56
'Chan ', # 0x57
'Qi ', # 0x58
'Chuang ', # 0x59
'Zi ', # 0x5a
'Gang ', # 0x5b
'Wan ', # 0x5c
'Bo ', # 0x5d
'Ji ', # 0x5e
'Duo ', # 0x5f
'Qing ', # 0x60
'Yan ', # 0x61
'Zhuo ', # 0x62
'Jian ', # 0x63
'Ji ', # 0x64
'Bo ', # 0x65
'Yan ', # 0x66
'Ju ', # 0x67
'Huo ', # 0x68
'Sheng ', # 0x69
'Jian ', # 0x6a
'Duo ', # 0x6b
'Duan ', # 0x6c
'Wu ', # 0x6d
'Gua ', # 0x6e
'Fu ', # 0x6f
'Sheng ', # 0x70
'Jian ', # 0x71
'Ge ', # 0x72
'Zha ', # 0x73
'Kai ', # 0x74
'Chuang ', # 0x75
'Juan ', # 0x76
'Chan ', # 0x77
'Tuan ', # 0x78
'Lu ', # 0x79
'Li ', # 0x7a
'Fou ', # 0x7b
'Shan ', # 0x7c
'Piao ', # 0x7d
'Kou ', # 0x7e
'Jiao ', # 0x7f
'Gua ', # 0x80
'Qiao ', # 0x81
'Jue ', # 0x82
'Hua ', # 0x83
'Zha ', # 0x84
'Zhuo ', # 0x85
'Lian ', # 0x86
'Ju ', # 0x87
'Pi ', # 0x88
'Liu ', # 0x89
'Gui ', # 0x8a
'Jiao ', # 0x8b
'Gui ', # 0x8c
'Jian ', # 0x8d
'Jian ', # 0x8e
'Tang ', # 0x8f
'Huo ', # 0x90
'Ji ', # 0x91
'Jian ', # 0x92
'Yi ', # 0x93
'Jian ', # 0x94
'Zhi ', # 0x95
'Chan ', # 0x96
'Cuan ', # 0x97
'Mo ', # 0x98
'Li ', # 0x99
'Zhu ', # 0x9a
'Li ', # 0x9b
'Ya ', # 0x9c
'Quan ', # 0x9d
'Ban ', # 0x9e
'Gong ', # 0x9f
'Jia ', # 0xa0
'Wu ', # 0xa1
'Mai ', # 0xa2
'Lie ', # 0xa3
'Jin ', # 0xa4
'Keng ', # 0xa5
'Xie ', # 0xa6
'Zhi ', # 0xa7
'Dong ', # 0xa8
'Zhu ', # 0xa9
'Nu ', # 0xaa
'Jie ', # 0xab
'Qu ', # 0xac
'Shao ', # 0xad
'Yi ', # 0xae
'Zhu ', # 0xaf
'Miao ', # 0xb0
'Li ', # 0xb1
'Jing ', # 0xb2
'Lao ', # 0xb3
'Lao ', # 0xb4
'Juan ', # 0xb5
'Kou ', # 0xb6
'Yang ', # 0xb7
'Wa ', # 0xb8
'Xiao ', # 0xb9
'Mou ', # 0xba
'Kuang ', # 0xbb
'Jie ', # 0xbc
'Lie ', # 0xbd
'He ', # 0xbe
'Shi ', # 0xbf
'Ke ', # 0xc0
'Jing ', # 0xc1
'Hao ', # 0xc2
'Bo ', # 0xc3
'Min ', # 0xc4
'Chi ', # 0xc5
'Lang ', # 0xc6
'Yong ', # 0xc7
'Yong ', # 0xc8
'Mian ', # 0xc9
'Ke ', # 0xca
'Xun ', # 0xcb
'Juan ', # 0xcc
'Qing ', # 0xcd
'Lu ', # 0xce
'Pou ', # 0xcf
'Meng ', # 0xd0
'Lai ', # 0xd1
'Le ', # 0xd2
'Kai ', # 0xd3
'Mian ', # 0xd4
'Dong ', # 0xd5
'Xu ', # 0xd6
'Xu ', # 0xd7
'Kan ', # 0xd8
'Wu ', # 0xd9
'Yi ', # 0xda
'Xun ', # 0xdb
'Weng ', # 0xdc
'Sheng ', # 0xdd
'Lao ', # 0xde
'Mu ', # 0xdf
'Lu ', # 0xe0
'Piao ', # 0xe1
'Shi ', # 0xe2
'Ji ', # 0xe3
'Qin ', # 0xe4
'Qiang ', # 0xe5
'Jiao ', # 0xe6
'Quan ', # 0xe7
'Yang ', # 0xe8
'Yi ', # 0xe9
'Jue ', # 0xea
'Fan ', # 0xeb
'Juan ', # 0xec
'Tong ', # 0xed
'Ju ', # 0xee
'Dan ', # 0xef
'Xie ', # 0xf0
'Mai ', # 0xf1
'Xun ', # 0xf2
'Xun ', # 0xf3
'Lu ', # 0xf4
'Li ', # 0xf5
'Che ', # 0xf6
'Rang ', # 0xf7
'Quan ', # 0xf8
'Bao ', # 0xf9
'Shao ', # 0xfa
'Yun ', # 0xfb
'Jiu ', # 0xfc
'Bao ', # 0xfd
'Gou ', # 0xfe
'Wu ', # 0xff
)
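# Usage sketch: unidecode indexes this 256-entry table with the low byte of a
# code point in the U+52xx block, e.g. data[ord(u'\u5200') & 0xff] == 'Dao '
# transliterates U+5200 (the CJK character for "knife").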
|
gpl-3.0
|
erpletzerp/letzerpcore
|
frappe/model/sync.py
|
4
|
2148
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""
Sync's doctype and docfields from txt files to database
perms will get synced only if none exist
"""
import frappe
import os
from frappe.modules.import_file import import_file_by_path
from frappe.modules.patch_handler import block_user
from frappe.utils import update_progress_bar
def sync_all(force=0, verbose=False):
block_user(True)
for app in frappe.get_installed_apps():
sync_for(app, force, verbose=verbose)
block_user(False)
frappe.clear_cache()
def sync_for(app_name, force=0, sync_everything = False, verbose=False):
files = []
if app_name == "frappe":
# these need to go first at time of install
for d in (("core", "docfield"), ("core", "docperm"), ("core", "doctype"),
("core", "user"), ("core", "role"), ("custom", "custom_field"),
("custom", "property_setter")):
files.append(os.path.join(frappe.get_app_path("frappe"), d[0],
"doctype", d[1], d[1] + ".json"))
for module_name in frappe.local.app_modules.get(app_name) or []:
folder = os.path.dirname(frappe.get_module(app_name + "." + module_name).__file__)
get_doc_files(files, folder, force, sync_everything, verbose=verbose)
l = len(files)
if l:
for i, doc_path in enumerate(files):
import_file_by_path(doc_path, force=force)
#print module_name + ' | ' + doctype + ' | ' + name
frappe.db.commit()
# show progress bar
update_progress_bar("Updating {0}".format(app_name), i, l)
print ""
def get_doc_files(files, start_path, force=0, sync_everything = False, verbose=False):
"""walk and sync all doctypes and pages"""
document_type = ['doctype', 'page', 'report', 'print_format']
for doctype in document_type:
doctype_path = os.path.join(start_path, doctype)
if os.path.exists(doctype_path):
for docname in os.listdir(doctype_path):
if os.path.isdir(os.path.join(doctype_path, docname)):
doc_path = os.path.join(doctype_path, docname, docname) + ".json"
if os.path.exists(doc_path):
if not doc_path in files:
files.append(doc_path)
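# Usage sketch (site name illustrative; assumes an initialized frappe site):
#
#   import frappe
#   frappe.init(site='mysite.local')
#   frappe.connect()
#   sync_all(force=1, verbose=True)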
|
mit
|