repo_name (stringlengths 6-100) | path (stringlengths 4-294) | copies (stringlengths 1-5) | size (stringlengths 4-6) | content (stringlengths 606-896k) | license (stringclasses, 15 values)
---|---|---|---|---|---|
henryk/cyberflex-shell | cards/generic_application.py | 2 | 1588 | import binascii, re, sys, cards
class Application:
# This must be a sequence of regular expressions
# When an application is selected through a matching AID
# then all corresponding classes are merged into the card
# object.
# The application classes themselves are responsible for
# unmerging, should their application become deselected.
# However, the default implementation in the generic
# Application class which triggers unmerging on a card reset
# and a successful SELECT APPLICATION should be sufficient
# in most cases.
# (Still haven't thought this through, though...)
## FIXME Unloading is not implemented yet
#
# NOTE: Implementing classes MUST derive from Application
AID_LIST = []
def load_applications(card, aid):
classes_to_load = []
for i in dir(cards):
possible_class = getattr(cards, i)
if not hasattr(possible_class, "DRIVER_NAME") or not issubclass(possible_class, Application):
continue
if possible_class.can_handle_aid(card, aid):
classes_to_load.append(possible_class)
print ".oO(Loading application '%s')" % ", ".join(possible_class.DRIVER_NAME)
card.add_classes(classes_to_load)
load_applications = staticmethod(load_applications)
def can_handle_aid(cls, card, aid):
for i in cls.AID_LIST:
if re.match(i+"$", binascii.b2a_hex(aid), re.I):
return True
return False
can_handle_aid = classmethod(can_handle_aid)
| gpl-2.0 |
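A minimal usage sketch for the Application base class above, assuming a hypothetical subclass; the DRIVER_NAME, AID pattern and AID bytes are illustrative only, and None is passed for the card because the generic can_handle_aid ignores it:

class ExampleApplication(Application):
    DRIVER_NAME = ["Example"]
    AID_LIST = ["a0000000031010"]

# matches because b2a_hex of the selected AID equals the pattern above
print ExampleApplication.can_handle_aid(None, "\xa0\x00\x00\x00\x03\x10\x10")  # -> True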
BFriedland/UserDataBase-Heroku | venv/Lib/linecache.py | 97 | 3862 | """Cache lines from files.
This is intended to read lines from modules imported -- hence if a filename
is not found, it will look down the module search path for a file by
that name.
"""
import sys
import os
import tokenize
__all__ = ["getline", "clearcache", "checkcache"]
def getline(filename, lineno, module_globals=None):
lines = getlines(filename, module_globals)
if 1 <= lineno <= len(lines):
return lines[lineno-1]
else:
return ''
# The cache
cache = {} # The cache
def clearcache():
"""Clear the cache entirely."""
global cache
cache = {}
def getlines(filename, module_globals=None):
"""Get the lines for a file from the cache.
Update the cache if it doesn't contain an entry for this file already."""
if filename in cache:
return cache[filename][2]
else:
return updatecache(filename, module_globals)
def checkcache(filename=None):
"""Discard cache entries that are out of date.
(This is not checked upon each call!)"""
if filename is None:
filenames = list(cache.keys())
else:
if filename in cache:
filenames = [filename]
else:
return
for filename in filenames:
size, mtime, lines, fullname = cache[filename]
if mtime is None:
continue # no-op for files loaded via a __loader__
try:
stat = os.stat(fullname)
except OSError:
del cache[filename]
continue
if size != stat.st_size or mtime != stat.st_mtime:
del cache[filename]
def updatecache(filename, module_globals=None):
"""Update a cache entry and return its list of lines.
If something's wrong, print a message, discard the cache entry,
and return an empty list."""
if filename in cache:
del cache[filename]
if not filename or (filename.startswith('<') and filename.endswith('>')):
return []
fullname = filename
try:
stat = os.stat(fullname)
except OSError:
basename = filename
# Try for a __loader__, if available
if module_globals and '__loader__' in module_globals:
name = module_globals.get('__name__')
loader = module_globals['__loader__']
get_source = getattr(loader, 'get_source', None)
if name and get_source:
try:
data = get_source(name)
except (ImportError, OSError):
pass
else:
if data is None:
# No luck, the PEP302 loader cannot find the source
# for this module.
return []
cache[filename] = (
len(data), None,
[line+'\n' for line in data.splitlines()], fullname
)
return cache[filename][2]
# Try looking through the module search path, which is only useful
# when handling a relative filename.
if os.path.isabs(filename):
return []
for dirname in sys.path:
try:
fullname = os.path.join(dirname, basename)
except (TypeError, AttributeError):
# Not sufficiently string-like to do anything useful with.
continue
try:
stat = os.stat(fullname)
break
except OSError:
pass
else:
return []
try:
with tokenize.open(fullname) as fp:
lines = fp.readlines()
except OSError:
return []
if lines and not lines[-1].endswith('\n'):
lines[-1] += '\n'
size, mtime = stat.st_size, stat.st_mtime
cache[filename] = size, mtime, lines, fullname
return lines
| mit |
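A short usage sketch for the linecache module above; the filename is an assumption for illustration, and getline simply returns '' when the file or line cannot be found:

import linecache

third_line = linecache.getline('some_module.py', 3)  # '' if the file or line is missing
linecache.checkcache('some_module.py')               # drop the entry if the file changed on disk
linecache.clearcache()                               # empty the cache entirely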
mezz64/home-assistant | tests/test_bootstrap.py | 6 | 17787 | """Test the bootstrapping."""
# pylint: disable=protected-access
import asyncio
import os
from unittest.mock import Mock
import pytest
from homeassistant import bootstrap, core, runner
import homeassistant.config as config_util
from homeassistant.exceptions import HomeAssistantError
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
from tests.common import (
MockModule,
MockPlatform,
get_test_config_dir,
mock_coro,
mock_entity_platform,
mock_integration,
)
ORIG_TIMEZONE = dt_util.DEFAULT_TIME_ZONE
VERSION_PATH = os.path.join(get_test_config_dir(), config_util.VERSION_FILE)
@pytest.fixture(autouse=True)
def apply_mock_storage(hass_storage):
"""Apply the storage mock."""
@pytest.fixture(autouse=True)
async def apply_stop_hass(stop_hass):
"""Make sure all hass are stopped."""
@pytest.fixture(autouse=True)
def mock_http_start_stop():
"""Mock HTTP start and stop."""
with patch(
"homeassistant.components.http.start_http_server_and_save_config"
), patch("homeassistant.components.http.HomeAssistantHTTP.stop"):
yield
@patch("homeassistant.bootstrap.async_enable_logging", Mock())
async def test_home_assistant_core_config_validation(hass):
"""Test if we pass in wrong information for HA conf."""
# Extensive HA conf validation testing is done
result = await bootstrap.async_from_config_dict(
{"homeassistant": {"latitude": "some string"}}, hass
)
assert result is None
async def test_async_enable_logging(hass):
"""Test to ensure logging is migrated to the queue handlers."""
with patch("logging.getLogger"), patch(
"homeassistant.bootstrap.async_activate_log_queue_handler"
) as mock_async_activate_log_queue_handler:
bootstrap.async_enable_logging(hass)
mock_async_activate_log_queue_handler.assert_called_once()
async def test_load_hassio(hass):
"""Test that we load Hass.io component."""
with patch.dict(os.environ, {}, clear=True):
assert bootstrap._get_domains(hass, {}) == set()
with patch.dict(os.environ, {"HASSIO": "1"}):
assert bootstrap._get_domains(hass, {}) == {"hassio"}
async def test_empty_setup(hass):
"""Test an empty set up loads the core."""
await bootstrap.async_from_config_dict({}, hass)
for domain in bootstrap.CORE_INTEGRATIONS:
assert domain in hass.config.components, domain
async def test_core_failure_loads_safe_mode(hass, caplog):
"""Test failing core setup aborts further setup."""
with patch(
"homeassistant.components.homeassistant.async_setup",
return_value=mock_coro(False),
):
await bootstrap.async_from_config_dict({"group": {}}, hass)
assert "core failed to initialize" in caplog.text
# We aborted early, group not set up
assert "group" not in hass.config.components
async def test_setting_up_config(hass):
"""Test we set up domains in config."""
await bootstrap._async_set_up_integrations(
hass, {"group hello": {}, "homeassistant": {}}
)
assert "group" in hass.config.components
async def test_setup_after_deps_all_present(hass):
"""Test after_dependencies when all present."""
order = []
def gen_domain_setup(domain):
async def async_setup(hass, config):
order.append(domain)
return True
return async_setup
mock_integration(
hass, MockModule(domain="root", async_setup=gen_domain_setup("root"))
)
mock_integration(
hass,
MockModule(
domain="first_dep",
async_setup=gen_domain_setup("first_dep"),
partial_manifest={"after_dependencies": ["root"]},
),
)
mock_integration(
hass,
MockModule(
domain="second_dep",
async_setup=gen_domain_setup("second_dep"),
partial_manifest={"after_dependencies": ["first_dep"]},
),
)
with patch(
"homeassistant.components.logger.async_setup", gen_domain_setup("logger")
):
await bootstrap._async_set_up_integrations(
hass, {"root": {}, "first_dep": {}, "second_dep": {}, "logger": {}}
)
assert "root" in hass.config.components
assert "first_dep" in hass.config.components
assert "second_dep" in hass.config.components
assert order == ["logger", "root", "first_dep", "second_dep"]
async def test_setup_after_deps_in_stage_1_ignored(hass):
"""Test after_dependencies are ignored in stage 1."""
# This test relies on this
assert "cloud" in bootstrap.STAGE_1_INTEGRATIONS
order = []
def gen_domain_setup(domain):
async def async_setup(hass, config):
order.append(domain)
return True
return async_setup
mock_integration(
hass,
MockModule(
domain="normal_integration",
async_setup=gen_domain_setup("normal_integration"),
partial_manifest={"after_dependencies": ["an_after_dep"]},
),
)
mock_integration(
hass,
MockModule(
domain="an_after_dep",
async_setup=gen_domain_setup("an_after_dep"),
),
)
mock_integration(
hass,
MockModule(
domain="cloud",
async_setup=gen_domain_setup("cloud"),
partial_manifest={"after_dependencies": ["normal_integration"]},
),
)
await bootstrap._async_set_up_integrations(
hass, {"cloud": {}, "normal_integration": {}, "an_after_dep": {}}
)
assert "normal_integration" in hass.config.components
assert "cloud" in hass.config.components
assert order == ["cloud", "an_after_dep", "normal_integration"]
async def test_setup_after_deps_via_platform(hass):
"""Test after_dependencies set up via platform."""
order = []
after_dep_event = asyncio.Event()
def gen_domain_setup(domain):
async def async_setup(hass, config):
if domain == "after_dep_of_platform_int":
await after_dep_event.wait()
order.append(domain)
return True
return async_setup
mock_integration(
hass,
MockModule(
domain="after_dep_of_platform_int",
async_setup=gen_domain_setup("after_dep_of_platform_int"),
),
)
mock_integration(
hass,
MockModule(
domain="platform_int",
async_setup=gen_domain_setup("platform_int"),
partial_manifest={"after_dependencies": ["after_dep_of_platform_int"]},
),
)
mock_entity_platform(hass, "light.platform_int", MockPlatform())
@core.callback
def continue_loading(_):
"""When light component loaded, continue other loading."""
after_dep_event.set()
hass.bus.async_listen_once("component_loaded", continue_loading)
await bootstrap._async_set_up_integrations(
hass, {"light": {"platform": "platform_int"}, "after_dep_of_platform_int": {}}
)
assert "light" in hass.config.components
assert "after_dep_of_platform_int" in hass.config.components
assert "platform_int" in hass.config.components
assert order == ["after_dep_of_platform_int", "platform_int"]
async def test_setup_after_deps_not_trigger_load(hass):
"""Test after_dependencies does not trigger loading it."""
order = []
def gen_domain_setup(domain):
async def async_setup(hass, config):
order.append(domain)
return True
return async_setup
mock_integration(
hass, MockModule(domain="root", async_setup=gen_domain_setup("root"))
)
mock_integration(
hass,
MockModule(
domain="first_dep",
async_setup=gen_domain_setup("first_dep"),
partial_manifest={"after_dependencies": ["root"]},
),
)
mock_integration(
hass,
MockModule(
domain="second_dep",
async_setup=gen_domain_setup("second_dep"),
partial_manifest={"after_dependencies": ["first_dep"]},
),
)
await bootstrap._async_set_up_integrations(hass, {"root": {}, "second_dep": {}})
assert "root" in hass.config.components
assert "first_dep" not in hass.config.components
assert "second_dep" in hass.config.components
async def test_setup_after_deps_not_present(hass):
"""Test after_dependencies when referenced integration doesn't exist."""
order = []
def gen_domain_setup(domain):
async def async_setup(hass, config):
order.append(domain)
return True
return async_setup
mock_integration(
hass, MockModule(domain="root", async_setup=gen_domain_setup("root"))
)
mock_integration(
hass,
MockModule(
domain="second_dep",
async_setup=gen_domain_setup("second_dep"),
partial_manifest={"after_dependencies": ["first_dep"]},
),
)
await bootstrap._async_set_up_integrations(
hass, {"root": {}, "first_dep": {}, "second_dep": {}}
)
assert "root" in hass.config.components
assert "first_dep" not in hass.config.components
assert "second_dep" in hass.config.components
assert order == ["root", "second_dep"]
@pytest.fixture
def mock_is_virtual_env():
"""Mock enable logging."""
with patch(
"homeassistant.bootstrap.is_virtual_env", return_value=False
) as is_virtual_env:
yield is_virtual_env
@pytest.fixture
def mock_enable_logging():
"""Mock enable logging."""
with patch("homeassistant.bootstrap.async_enable_logging") as enable_logging:
yield enable_logging
@pytest.fixture
def mock_mount_local_lib_path():
"""Mock enable logging."""
with patch(
"homeassistant.bootstrap.async_mount_local_lib_path"
) as mount_local_lib_path:
yield mount_local_lib_path
@pytest.fixture
def mock_process_ha_config_upgrade():
"""Mock enable logging."""
with patch(
"homeassistant.config.process_ha_config_upgrade"
) as process_ha_config_upgrade:
yield process_ha_config_upgrade
@pytest.fixture
def mock_ensure_config_exists():
"""Mock enable logging."""
with patch(
"homeassistant.config.async_ensure_config_exists", return_value=True
) as ensure_config_exists:
yield ensure_config_exists
async def test_setup_hass(
mock_enable_logging,
mock_is_virtual_env,
mock_mount_local_lib_path,
mock_ensure_config_exists,
mock_process_ha_config_upgrade,
caplog,
loop,
):
"""Test it works."""
verbose = Mock()
log_rotate_days = Mock()
log_file = Mock()
log_no_color = Mock()
with patch(
"homeassistant.config.async_hass_config_yaml",
return_value={"browser": {}, "frontend": {}},
), patch.object(bootstrap, "LOG_SLOW_STARTUP_INTERVAL", 5000):
hass = await bootstrap.async_setup_hass(
runner.RuntimeConfig(
config_dir=get_test_config_dir(),
verbose=verbose,
log_rotate_days=log_rotate_days,
log_file=log_file,
log_no_color=log_no_color,
skip_pip=True,
safe_mode=False,
),
)
assert "Waiting on integrations to complete setup" not in caplog.text
assert "browser" in hass.config.components
assert "safe_mode" not in hass.config.components
assert len(mock_enable_logging.mock_calls) == 1
assert mock_enable_logging.mock_calls[0][1] == (
hass,
verbose,
log_rotate_days,
log_file,
log_no_color,
)
assert len(mock_mount_local_lib_path.mock_calls) == 1
assert len(mock_ensure_config_exists.mock_calls) == 1
assert len(mock_process_ha_config_upgrade.mock_calls) == 1
async def test_setup_hass_takes_longer_than_log_slow_startup(
mock_enable_logging,
mock_is_virtual_env,
mock_mount_local_lib_path,
mock_ensure_config_exists,
mock_process_ha_config_upgrade,
caplog,
loop,
):
"""Test it works."""
verbose = Mock()
log_rotate_days = Mock()
log_file = Mock()
log_no_color = Mock()
async def _async_setup_that_blocks_startup(*args, **kwargs):
await asyncio.sleep(0.6)
return True
with patch(
"homeassistant.config.async_hass_config_yaml",
return_value={"browser": {}, "frontend": {}},
), patch.object(bootstrap, "LOG_SLOW_STARTUP_INTERVAL", 0.3), patch(
"homeassistant.components.frontend.async_setup",
side_effect=_async_setup_that_blocks_startup,
):
await bootstrap.async_setup_hass(
runner.RuntimeConfig(
config_dir=get_test_config_dir(),
verbose=verbose,
log_rotate_days=log_rotate_days,
log_file=log_file,
log_no_color=log_no_color,
skip_pip=True,
safe_mode=False,
),
)
assert "Waiting on integrations to complete setup" in caplog.text
async def test_setup_hass_invalid_yaml(
mock_enable_logging,
mock_is_virtual_env,
mock_mount_local_lib_path,
mock_ensure_config_exists,
mock_process_ha_config_upgrade,
loop,
):
"""Test it works."""
with patch(
"homeassistant.config.async_hass_config_yaml", side_effect=HomeAssistantError
):
hass = await bootstrap.async_setup_hass(
runner.RuntimeConfig(
config_dir=get_test_config_dir(),
verbose=False,
log_rotate_days=10,
log_file="",
log_no_color=False,
skip_pip=True,
safe_mode=False,
),
)
assert "safe_mode" in hass.config.components
assert len(mock_mount_local_lib_path.mock_calls) == 0
async def test_setup_hass_config_dir_nonexistent(
mock_enable_logging,
mock_is_virtual_env,
mock_mount_local_lib_path,
mock_ensure_config_exists,
mock_process_ha_config_upgrade,
loop,
):
"""Test it works."""
mock_ensure_config_exists.return_value = False
assert (
await bootstrap.async_setup_hass(
runner.RuntimeConfig(
config_dir=get_test_config_dir(),
verbose=False,
log_rotate_days=10,
log_file="",
log_no_color=False,
skip_pip=True,
safe_mode=False,
),
)
is None
)
async def test_setup_hass_safe_mode(
mock_enable_logging,
mock_is_virtual_env,
mock_mount_local_lib_path,
mock_ensure_config_exists,
mock_process_ha_config_upgrade,
loop,
):
"""Test it works."""
with patch("homeassistant.components.browser.setup") as browser_setup, patch(
"homeassistant.config_entries.ConfigEntries.async_domains",
return_value=["browser"],
):
hass = await bootstrap.async_setup_hass(
runner.RuntimeConfig(
config_dir=get_test_config_dir(),
verbose=False,
log_rotate_days=10,
log_file="",
log_no_color=False,
skip_pip=True,
safe_mode=True,
),
)
assert "safe_mode" in hass.config.components
assert len(mock_mount_local_lib_path.mock_calls) == 0
# Validate we didn't try to set up config entry.
assert "browser" not in hass.config.components
assert len(browser_setup.mock_calls) == 0
async def test_setup_hass_invalid_core_config(
mock_enable_logging,
mock_is_virtual_env,
mock_mount_local_lib_path,
mock_ensure_config_exists,
mock_process_ha_config_upgrade,
loop,
):
"""Test it works."""
with patch(
"homeassistant.config.async_hass_config_yaml",
return_value={"homeassistant": {"non-existing": 1}},
):
hass = await bootstrap.async_setup_hass(
runner.RuntimeConfig(
config_dir=get_test_config_dir(),
verbose=False,
log_rotate_days=10,
log_file="",
log_no_color=False,
skip_pip=True,
safe_mode=False,
),
)
assert "safe_mode" in hass.config.components
async def test_setup_safe_mode_if_no_frontend(
mock_enable_logging,
mock_is_virtual_env,
mock_mount_local_lib_path,
mock_ensure_config_exists,
mock_process_ha_config_upgrade,
loop,
):
"""Test we setup safe mode if frontend didn't load."""
verbose = Mock()
log_rotate_days = Mock()
log_file = Mock()
log_no_color = Mock()
with patch(
"homeassistant.config.async_hass_config_yaml",
return_value={
"homeassistant": {
"internal_url": "http://192.168.1.100:8123",
"external_url": "https://abcdef.ui.nabu.casa",
},
"map": {},
"person": {"invalid": True},
},
):
hass = await bootstrap.async_setup_hass(
runner.RuntimeConfig(
config_dir=get_test_config_dir(),
verbose=verbose,
log_rotate_days=log_rotate_days,
log_file=log_file,
log_no_color=log_no_color,
skip_pip=True,
safe_mode=False,
),
)
assert "safe_mode" in hass.config.components
assert hass.config.config_dir == get_test_config_dir()
assert hass.config.skip_pip
assert hass.config.internal_url == "http://192.168.1.100:8123"
assert hass.config.external_url == "https://abcdef.ui.nabu.casa"
| apache-2.0 |
rfguri/vimfiles | bundle/ycm/third_party/ycmd/third_party/JediHTTP/vendor/jedi/jedi/evaluate/recursion.py | 1 | 5177 | """
Recursions are the recipe of |jedi| to conquer Python code. However, someone
must stop recursions going mad. Some settings are here to make |jedi| stop at
the right time. You can read more about them :ref:`here <settings-recursion>`.
Next to :mod:`jedi.evaluate.cache` this module also makes |jedi| not
thread-safe. Why? ``execution_recursion_decorator`` uses class variables to
count the function calls.
"""
from jedi import debug
from jedi import settings
from jedi.evaluate import iterable
def recursion_decorator(func):
def run(evaluator, stmt, *args, **kwargs):
rec_detect = evaluator.recursion_detector
# print stmt, len(self.node_statements())
if rec_detect.push_stmt(stmt):
return set()
else:
result = func(evaluator, stmt, *args, **kwargs)
rec_detect.pop_stmt()
return result
return run
class RecursionDetector(object):
"""
A decorator to detect recursions in statements. In a recursion a statement
at the same place, in the same module may not be executed two times.
"""
def __init__(self, evaluator):
self.top = None
self.current = None
self._evaluator = evaluator
def push_stmt(self, stmt):
self.current = _RecursionNode(self._evaluator, stmt, self.current)
check = self._check_recursion()
if check:
debug.warning('catched stmt recursion: %s against %s @%s', stmt,
check.stmt, stmt.start_pos)
self.pop_stmt()
return True
return False
def pop_stmt(self):
if self.current is not None:
# I don't know how current can be None, but sometimes it happens
# with Python3.
self.current = self.current.parent
def _check_recursion(self):
test = self.current
while True:
test = test.parent
if self.current == test:
return test
if not test:
return False
def node_statements(self):
result = []
n = self.current
while n:
result.insert(0, n.stmt)
n = n.parent
return result
class _RecursionNode(object):
""" A node of the RecursionDecorator. """
def __init__(self, evaluator, stmt, parent):
self._evaluator = evaluator
self.script = stmt.get_parent_until()
self.position = stmt.start_pos
self.parent = parent
self.stmt = stmt
# Don't check param instances, they are not causing recursions
# The same's true for the builtins, because the builtins are really
# simple.
self.is_ignored = self.script == self._evaluator.BUILTINS
def __eq__(self, other):
if not other:
return None
return self.script == other.script \
and self.position == other.position \
and not self.is_ignored and not other.is_ignored
def execution_recursion_decorator(func):
def run(execution, **kwargs):
detector = execution._evaluator.execution_recursion_detector
if detector.push_execution(execution):
result = set()
else:
result = func(execution, **kwargs)
detector.pop_execution()
return result
return run
class ExecutionRecursionDetector(object):
"""
Catches recursions of executions.
"""
def __init__(self, evaluator):
self.recursion_level = 0
self.parent_execution_funcs = []
self.execution_funcs = set()
self.execution_count = 0
self._evaluator = evaluator
def __call__(self, execution):
debug.dbg('Execution recursions: %s', execution, self.recursion_level,
self.execution_count, len(self.execution_funcs))
if self.check_recursion(execution):
result = set()
else:
result = self.func(execution)
self.pop_execution()
return result
def pop_execution(self):
self.parent_execution_funcs.pop()
self.recursion_level -= 1
def push_execution(self, execution):
in_par_execution_funcs = execution.base in self.parent_execution_funcs
in_execution_funcs = execution.base in self.execution_funcs
self.recursion_level += 1
self.execution_count += 1
self.execution_funcs.add(execution.base)
self.parent_execution_funcs.append(execution.base)
if self.execution_count > settings.max_executions:
return True
if isinstance(execution.base, (iterable.Array, iterable.Generator)):
return False
module = execution.get_parent_until()
if module == self._evaluator.BUILTINS:
return False
if in_par_execution_funcs:
if self.recursion_level > settings.max_function_recursion_level:
return True
if in_execution_funcs and \
len(self.execution_funcs) > settings.max_until_execution_unique:
return True
if self.execution_count > settings.max_executions_without_builtins:
return True
return False
| mit |
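A minimal, hypothetical sketch of how recursion_decorator above is meant to wrap an evaluation function; the evaluator and stmt arguments are stand-ins for the real jedi objects (an Evaluator carrying a recursion_detector, and a parser statement node):

@recursion_decorator
def eval_statement(evaluator, stmt):
    # normal evaluation would recurse back into the evaluator here; the
    # decorator short-circuits to an empty set when push_stmt() reports
    # that this statement is already on the stack
    return set()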
girving/tensorflow | tensorflow/contrib/grid_rnn/python/ops/grid_rnn_cell.py | 23 | 23200 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module for constructing GridRNN cells"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import functools
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import tf_logging as logging
from tensorflow.contrib import layers
from tensorflow.contrib import rnn
class GridRNNCell(rnn.RNNCell):
"""Grid recurrent cell.
This implementation is based on:
http://arxiv.org/pdf/1507.01526v3.pdf
This is the generic implementation of GridRNN. Users can specify arbitrary
number of dimensions,
set some of them to be priority (section 3.2), non-recurrent (section 3.3)
and input/output dimensions (section 3.4).
Weight sharing can also be specified using the `tied` parameter.
Type of recurrent units can be specified via `cell_fn`.
"""
def __init__(self,
num_units,
num_dims=1,
input_dims=None,
output_dims=None,
priority_dims=None,
non_recurrent_dims=None,
tied=False,
cell_fn=None,
non_recurrent_fn=None,
state_is_tuple=True,
output_is_tuple=True):
"""Initialize the parameters of a Grid RNN cell
Args:
num_units: int, The number of units in all dimensions of this GridRNN cell
num_dims: int, Number of dimensions of this grid.
input_dims: int or list, List of dimensions which will receive input data.
output_dims: int or list, List of dimensions from which the output will be
recorded.
priority_dims: int or list, List of dimensions to be considered as
priority dimensions.
If None, no dimension is prioritized.
non_recurrent_dims: int or list, List of dimensions that are not
recurrent.
The transfer function for non-recurrent dimensions is specified
via `non_recurrent_fn`, which is
default to be `tensorflow.nn.relu`.
tied: bool, Whether to share the weights among the dimensions of this
GridRNN cell.
If there are non-recurrent dimensions in the grid, weights are
shared between each group of recurrent and non-recurrent
dimensions.
cell_fn: function, a function which returns the recurrent cell object.
Has to be in the following signature:
```
def cell_func(num_units):
# ...
```
and returns an object of type `RNNCell`. If None, LSTMCell with
default parameters will be used.
Note that if you use a custom RNNCell (with `cell_fn`), it is your
responsibility to make sure the inner cell use `state_is_tuple=True`.
non_recurrent_fn: a tensorflow Op that will be the transfer function of
the non-recurrent dimensions
state_is_tuple: If True, accepted and returned states are tuples of the
states of the recurrent dimensions. If False, they are concatenated
along the column axis. The latter behavior will soon be deprecated.
Note that if you use a custom RNNCell (with `cell_fn`), it is your
responsibility to make sure the inner cell use `state_is_tuple=True`.
output_is_tuple: If True, the output is a tuple of the outputs of the
recurrent dimensions. If False, they are concatenated along the
column axis. The latter behavior will soon be deprecated.
Raises:
TypeError: if cell_fn does not return an RNNCell instance.
"""
if not state_is_tuple:
logging.warning('%s: Using a concatenated state is slower and will '
'soon be deprecated. Use state_is_tuple=True.', self)
if not output_is_tuple:
logging.warning('%s: Using a concatenated output is slower and will '
'soon be deprecated. Use output_is_tuple=True.', self)
if num_dims < 1:
raise ValueError('dims must be >= 1: {}'.format(num_dims))
self._config = _parse_rnn_config(num_dims, input_dims, output_dims,
priority_dims, non_recurrent_dims,
non_recurrent_fn or nn.relu, tied,
num_units)
self._state_is_tuple = state_is_tuple
self._output_is_tuple = output_is_tuple
if cell_fn is None:
my_cell_fn = functools.partial(
rnn.LSTMCell, num_units=num_units, state_is_tuple=state_is_tuple)
else:
my_cell_fn = lambda: cell_fn(num_units)
if tied:
self._cells = [my_cell_fn()] * num_dims
else:
self._cells = [my_cell_fn() for _ in range(num_dims)]
if not isinstance(self._cells[0], rnn.RNNCell):
raise TypeError('cell_fn must return an RNNCell instance, saw: %s' %
type(self._cells[0]))
if self._output_is_tuple:
self._output_size = tuple(self._cells[0].output_size
for _ in self._config.outputs)
else:
self._output_size = self._cells[0].output_size * len(self._config.outputs)
if self._state_is_tuple:
self._state_size = tuple(self._cells[0].state_size
for _ in self._config.recurrents)
else:
self._state_size = self._cell_state_size() * len(self._config.recurrents)
@property
def output_size(self):
return self._output_size
@property
def state_size(self):
return self._state_size
def __call__(self, inputs, state, scope=None):
"""Run one step of GridRNN.
Args:
inputs: input Tensor, 2D, batch x input_size. Or None
state: state Tensor, 2D, batch x state_size. Note that state_size =
cell_state_size * recurrent_dims
scope: VariableScope for the created subgraph; defaults to "GridRNNCell".
Returns:
A tuple containing:
- A 2D, batch x output_size, Tensor representing the output of the cell
after reading "inputs" when previous state was "state".
- A 2D, batch x state_size, Tensor representing the new state of the cell
after reading "inputs" when previous state was "state".
"""
conf = self._config
dtype = inputs.dtype
c_prev, m_prev, cell_output_size = self._extract_states(state)
new_output = [None] * conf.num_dims
new_state = [None] * conf.num_dims
with vs.variable_scope(scope or type(self).__name__): # GridRNNCell
# project input, populate c_prev and m_prev
self._project_input(inputs, c_prev, m_prev, cell_output_size > 0)
# propagate along dimensions, first for non-priority dimensions
# then priority dimensions
_propagate(conf.non_priority, conf, self._cells, c_prev, m_prev,
new_output, new_state, True)
_propagate(conf.priority, conf, self._cells,
c_prev, m_prev, new_output, new_state, False)
# collect outputs and states
output_tensors = [new_output[i] for i in self._config.outputs]
if self._output_is_tuple:
output = tuple(output_tensors)
else:
if output_tensors:
output = array_ops.concat(output_tensors, 1)
else:
output = array_ops.zeros([0, 0], dtype)
if self._state_is_tuple:
states = tuple(new_state[i] for i in self._config.recurrents)
else:
# concat each state first, then flatten the whole thing
state_tensors = [
x for i in self._config.recurrents for x in new_state[i]
]
if state_tensors:
states = array_ops.concat(state_tensors, 1)
else:
states = array_ops.zeros([0, 0], dtype)
return output, states
def _extract_states(self, state):
"""Extract the cell and previous output tensors from the given state.
Args:
state: The RNN state.
Returns:
Tuple of the cell value, previous output, and cell_output_size.
Raises:
ValueError: If len(self._config.recurrents) != len(state).
"""
conf = self._config
# c_prev is `m` (cell value), and
# m_prev is `h` (previous output) in the paper.
# Keeping c and m here for consistency with the codebase
c_prev = [None] * conf.num_dims
m_prev = [None] * conf.num_dims
# for LSTM : state = memory cell + output, hence cell_output_size > 0
# for GRU/RNN: state = output (whose size is equal to _num_units),
# hence cell_output_size = 0
total_cell_state_size = self._cell_state_size()
cell_output_size = total_cell_state_size - conf.num_units
if self._state_is_tuple:
if len(conf.recurrents) != len(state):
raise ValueError('Expected state as a tuple of {} '
'element'.format(len(conf.recurrents)))
for recurrent_dim, recurrent_state in zip(conf.recurrents, state):
if cell_output_size > 0:
c_prev[recurrent_dim], m_prev[recurrent_dim] = recurrent_state
else:
m_prev[recurrent_dim] = recurrent_state
else:
for recurrent_dim, start_idx in zip(conf.recurrents,
range(0, self.state_size,
total_cell_state_size)):
if cell_output_size > 0:
c_prev[recurrent_dim] = array_ops.slice(state, [0, start_idx],
[-1, conf.num_units])
m_prev[recurrent_dim] = array_ops.slice(
state, [0, start_idx + conf.num_units], [-1, cell_output_size])
else:
m_prev[recurrent_dim] = array_ops.slice(state, [0, start_idx],
[-1, conf.num_units])
return c_prev, m_prev, cell_output_size
def _project_input(self, inputs, c_prev, m_prev, with_c):
"""Fills in c_prev and m_prev with projected input, for input dimensions.
Args:
inputs: inputs tensor
c_prev: cell value
m_prev: previous output
with_c: boolean; whether to include project_c.
Raises:
ValueError: if len(self._config.input) != len(inputs)
"""
conf = self._config
if (inputs is not None and inputs.get_shape().with_rank(2)[1].value > 0 and
conf.inputs):
if isinstance(inputs, tuple):
if len(conf.inputs) != len(inputs):
raise ValueError('Expect inputs as a tuple of {} '
'tensors'.format(len(conf.inputs)))
input_splits = inputs
else:
input_splits = array_ops.split(
value=inputs, num_or_size_splits=len(conf.inputs), axis=1)
input_sz = input_splits[0].get_shape().with_rank(2)[1].value
for i, j in enumerate(conf.inputs):
input_project_m = vs.get_variable(
'project_m_{}'.format(j), [input_sz, conf.num_units],
dtype=inputs.dtype)
m_prev[j] = math_ops.matmul(input_splits[i], input_project_m)
if with_c:
input_project_c = vs.get_variable(
'project_c_{}'.format(j), [input_sz, conf.num_units],
dtype=inputs.dtype)
c_prev[j] = math_ops.matmul(input_splits[i], input_project_c)
def _cell_state_size(self):
"""Total size of the state of the inner cell used in this grid.
Returns:
Total size of the state of the inner cell.
"""
state_sizes = self._cells[0].state_size
if isinstance(state_sizes, tuple):
return sum(state_sizes)
return state_sizes
"""Specialized cells, for convenience
"""
class Grid1BasicRNNCell(GridRNNCell):
"""1D BasicRNN cell"""
def __init__(self, num_units, state_is_tuple=True, output_is_tuple=True):
super(Grid1BasicRNNCell, self).__init__(
num_units=num_units,
num_dims=1,
input_dims=0,
output_dims=0,
priority_dims=0,
tied=False,
cell_fn=lambda n: rnn.BasicRNNCell(num_units=n),
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
class Grid2BasicRNNCell(GridRNNCell):
"""2D BasicRNN cell
This creates a 2D cell which receives input and gives output in the first
dimension.
The first dimension can optionally be non-recurrent if `non_recurrent_fn` is
specified.
"""
def __init__(self,
num_units,
tied=False,
non_recurrent_fn=None,
state_is_tuple=True,
output_is_tuple=True):
super(Grid2BasicRNNCell, self).__init__(
num_units=num_units,
num_dims=2,
input_dims=0,
output_dims=0,
priority_dims=0,
tied=tied,
non_recurrent_dims=None if non_recurrent_fn is None else 0,
cell_fn=lambda n: rnn.BasicRNNCell(num_units=n),
non_recurrent_fn=non_recurrent_fn,
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
class Grid1BasicLSTMCell(GridRNNCell):
"""1D BasicLSTM cell."""
def __init__(self,
num_units,
forget_bias=1,
state_is_tuple=True,
output_is_tuple=True):
def cell_fn(n):
return rnn.BasicLSTMCell(num_units=n, forget_bias=forget_bias)
super(Grid1BasicLSTMCell, self).__init__(
num_units=num_units,
num_dims=1,
input_dims=0,
output_dims=0,
priority_dims=0,
tied=False,
cell_fn=cell_fn,
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
class Grid2BasicLSTMCell(GridRNNCell):
"""2D BasicLSTM cell.
This creates a 2D cell which receives input and gives output in the first
dimension.
The first dimension can optionally be non-recurrent if `non_recurrent_fn` is
specified.
"""
def __init__(self,
num_units,
tied=False,
non_recurrent_fn=None,
forget_bias=1,
state_is_tuple=True,
output_is_tuple=True):
def cell_fn(n):
return rnn.BasicLSTMCell(num_units=n, forget_bias=forget_bias)
super(Grid2BasicLSTMCell, self).__init__(
num_units=num_units,
num_dims=2,
input_dims=0,
output_dims=0,
priority_dims=0,
tied=tied,
non_recurrent_dims=None if non_recurrent_fn is None else 0,
cell_fn=cell_fn,
non_recurrent_fn=non_recurrent_fn,
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
class Grid1LSTMCell(GridRNNCell):
"""1D LSTM cell.
This is different from Grid1BasicLSTMCell because it gives options to
specify the forget bias and enabling peepholes.
"""
def __init__(self,
num_units,
use_peepholes=False,
forget_bias=1.0,
state_is_tuple=True,
output_is_tuple=True):
def cell_fn(n):
return rnn.LSTMCell(
num_units=n, forget_bias=forget_bias, use_peepholes=use_peepholes)
super(Grid1LSTMCell, self).__init__(
num_units=num_units,
num_dims=1,
input_dims=0,
output_dims=0,
priority_dims=0,
cell_fn=cell_fn,
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
class Grid2LSTMCell(GridRNNCell):
"""2D LSTM cell.
This creates a 2D cell which receives input and gives output in the first
dimension.
The first dimension can optionally be non-recurrent if `non_recurrent_fn` is
specified.
"""
def __init__(self,
num_units,
tied=False,
non_recurrent_fn=None,
use_peepholes=False,
forget_bias=1.0,
state_is_tuple=True,
output_is_tuple=True):
def cell_fn(n):
return rnn.LSTMCell(
num_units=n, forget_bias=forget_bias, use_peepholes=use_peepholes)
super(Grid2LSTMCell, self).__init__(
num_units=num_units,
num_dims=2,
input_dims=0,
output_dims=0,
priority_dims=0,
tied=tied,
non_recurrent_dims=None if non_recurrent_fn is None else 0,
cell_fn=cell_fn,
non_recurrent_fn=non_recurrent_fn,
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
class Grid3LSTMCell(GridRNNCell):
"""3D BasicLSTM cell.
This creates a 2D cell which receives input and gives output in the first
dimension.
The first dimension can optionally be non-recurrent if `non_recurrent_fn` is
specified.
The second and third dimensions are LSTM.
"""
def __init__(self,
num_units,
tied=False,
non_recurrent_fn=None,
use_peepholes=False,
forget_bias=1.0,
state_is_tuple=True,
output_is_tuple=True):
def cell_fn(n):
return rnn.LSTMCell(
num_units=n, forget_bias=forget_bias, use_peepholes=use_peepholes)
super(Grid3LSTMCell, self).__init__(
num_units=num_units,
num_dims=3,
input_dims=0,
output_dims=0,
priority_dims=0,
tied=tied,
non_recurrent_dims=None if non_recurrent_fn is None else 0,
cell_fn=cell_fn,
non_recurrent_fn=non_recurrent_fn,
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
class Grid2GRUCell(GridRNNCell):
"""2D LSTM cell.
This creates a 2D cell which receives input and gives output in the first
dimension.
The first dimension can optionally be non-recurrent if `non_recurrent_fn` is
specified.
"""
def __init__(self,
num_units,
tied=False,
non_recurrent_fn=None,
state_is_tuple=True,
output_is_tuple=True):
super(Grid2GRUCell, self).__init__(
num_units=num_units,
num_dims=2,
input_dims=0,
output_dims=0,
priority_dims=0,
tied=tied,
non_recurrent_dims=None if non_recurrent_fn is None else 0,
cell_fn=lambda n: rnn.GRUCell(num_units=n),
non_recurrent_fn=non_recurrent_fn,
state_is_tuple=state_is_tuple,
output_is_tuple=output_is_tuple)
# Helpers
_GridRNNDimension = namedtuple('_GridRNNDimension', [
'idx', 'is_input', 'is_output', 'is_priority', 'non_recurrent_fn'
])
_GridRNNConfig = namedtuple('_GridRNNConfig',
['num_dims', 'dims', 'inputs', 'outputs',
'recurrents', 'priority', 'non_priority', 'tied',
'num_units'])
def _parse_rnn_config(num_dims, ls_input_dims, ls_output_dims, ls_priority_dims,
ls_non_recurrent_dims, non_recurrent_fn, tied, num_units):
def check_dim_list(ls):
if ls is None:
ls = []
if not isinstance(ls, (list, tuple)):
ls = [ls]
ls = sorted(set(ls))
if any(_ < 0 or _ >= num_dims for _ in ls):
raise ValueError('Invalid dims: {}. Must be in [0, {})'.format(ls,
num_dims))
return ls
input_dims = check_dim_list(ls_input_dims)
output_dims = check_dim_list(ls_output_dims)
priority_dims = check_dim_list(ls_priority_dims)
non_recurrent_dims = check_dim_list(ls_non_recurrent_dims)
rnn_dims = []
for i in range(num_dims):
rnn_dims.append(
_GridRNNDimension(
idx=i,
is_input=(i in input_dims),
is_output=(i in output_dims),
is_priority=(i in priority_dims),
non_recurrent_fn=non_recurrent_fn
if i in non_recurrent_dims else None))
return _GridRNNConfig(
num_dims=num_dims,
dims=rnn_dims,
inputs=input_dims,
outputs=output_dims,
recurrents=[x for x in range(num_dims) if x not in non_recurrent_dims],
priority=priority_dims,
non_priority=[x for x in range(num_dims) if x not in priority_dims],
tied=tied,
num_units=num_units)
def _propagate(dim_indices, conf, cells, c_prev, m_prev, new_output, new_state,
first_call):
"""Propagates through all the cells in dim_indices dimensions.
"""
if len(dim_indices) == 0:
return
# Because of the way RNNCells are implemented, we take the last dimension
# (H_{N-1}) out and feed it as the state of the RNN cell
# (in `last_dim_output`).
# The input of the cell (H_0 to H_{N-2}) are concatenated into `cell_inputs`
if conf.num_dims > 1:
ls_cell_inputs = [None] * (conf.num_dims - 1)
for d in conf.dims[:-1]:
if new_output[d.idx] is None:
ls_cell_inputs[d.idx] = m_prev[d.idx]
else:
ls_cell_inputs[d.idx] = new_output[d.idx]
cell_inputs = array_ops.concat(ls_cell_inputs, 1)
else:
cell_inputs = array_ops.zeros([m_prev[0].get_shape().as_list()[0], 0],
m_prev[0].dtype)
last_dim_output = (new_output[-1]
if new_output[-1] is not None else m_prev[-1])
for i in dim_indices:
d = conf.dims[i]
if d.non_recurrent_fn:
if conf.num_dims > 1:
linear_args = array_ops.concat([cell_inputs, last_dim_output], 1)
else:
linear_args = last_dim_output
with vs.variable_scope('non_recurrent' if conf.tied else
'non_recurrent/cell_{}'.format(i)):
if conf.tied and not (first_call and i == dim_indices[0]):
vs.get_variable_scope().reuse_variables()
new_output[d.idx] = layers.fully_connected(
linear_args,
num_outputs=conf.num_units,
activation_fn=d.non_recurrent_fn,
weights_initializer=(vs.get_variable_scope().initializer or
layers.initializers.xavier_initializer),
weights_regularizer=vs.get_variable_scope().regularizer)
else:
if c_prev[i] is not None:
cell_state = (c_prev[i], last_dim_output)
else:
# for GRU/RNN, the state is just the previous output
cell_state = last_dim_output
with vs.variable_scope('recurrent' if conf.tied else
'recurrent/cell_{}'.format(i)):
if conf.tied and not (first_call and i == dim_indices[0]):
vs.get_variable_scope().reuse_variables()
cell = cells[i]
new_output[d.idx], new_state[d.idx] = cell(cell_inputs, cell_state)
| apache-2.0 |
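A construction sketch for the grid cells above, assuming a TensorFlow 1.x environment where tf.contrib.grid_rnn is importable; the tensor shape and unit count are illustrative only:

import tensorflow as tf
from tensorflow.contrib.grid_rnn.python.ops import grid_rnn_cell

inputs = tf.placeholder(tf.float32, [None, 20, 64])  # batch x time x features
cell = grid_rnn_cell.Grid2LSTMCell(num_units=128, use_peepholes=True)
outputs, state = tf.nn.dynamic_rnn(cell, inputs, dtype=tf.float32)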
project-fluxo/fluxo | tests/parallel_issue/runscript.py | 1 | 2214 | import os
import sys
#from helpers import modify_prm # include via file link to ../../../tests/helpers.py
jobs ={}
#SUCCESSFUL!
jobs['master_mhd_para'] ={'exec' :'../../build_master_mhd_para/bin/fluxo',
'param':'parameter_mhd.ini'}
#jobs['mortarmaster_ns_para'] ={'exec' :'../../build_mortarmaster_ns_para/bin/fluxo',
# 'param':'parameter_ns.ini'}
#jobs['linadv_nofix'] ={'exec' :'../../build_linadv_nofix/bin/fluxo',
# 'param':'parameter_linadv.ini'}
#jobs['linadv_fix'] ={'exec' :'../../build_linadv_fix/bin/fluxo',
# 'param':'parameter_linadv.ini'}
#jobs['ns_nopara']={'exec' :'../../build_master_ns_nopara/bin/fluxo',
# 'param':'parameter_ns.ini'}
#jobs['ns_conf_para'] ={'exec' :'../../build_master_ns_para/bin/fluxo',
# 'param':'parameter_ns_conf.ini'}
#jobs['ns_conf_paracons'] ={'exec' :'../../build_master_ns_paracons/bin/fluxo',
# 'param':'parameter_ns_conf.ini'}
#jobs['ns_para_prim_fix'] ={'exec' :'../../build_ns_para_prim_fix/bin/fluxo',
# 'param':'parameter_ns.ini'}
#jobs['ns_para_br2cons_fix'] ={'exec' :'../../build_master_ns_para_br2cons/bin/fluxo',
# 'param':'parameter_ns.ini'}
#jobs['ns_para_fix'] ={'exec' :'../../build_master_ns_para/bin/fluxo',
# 'param':'parameter_ns.ini'}
#unsuccessful...
#jobs['jesse_ns_para'] ={'exec' :'../../build_jesse_ns_para/bin/fluxo',
# 'param':'parameter_ns.ini'}
#jobs['mortarmaster_off_ns_para'] ={'exec' :'../../build_mortarmaster_off_ns_para/bin/fluxo',
# 'param':'parameter_ns.ini'}
#jobs['ns_paracons'] ={'exec' :'../../build_master_ns_paracons/bin/fluxo',
# 'param':'parameter_ns.ini'}
#jobs['ns_para_br2cons'] ={'exec' :'../../build_master_ns_para_br2cons/bin/fluxo',
# 'param':'parameter_ns.ini'}
procs=["1","2","3"]
for j_name,job in jobs.items():
for proc in procs:
print('running job %s on procs %s ' %(j_name,proc))
logfile='log_'+j_name+'_np'+proc
os.system('mpirun -np '+proc+' '+ job['exec'] +' '+ job['param'] + ' > '+logfile)
| gpl-3.0 |
xydinesh/youtube-dl | youtube_dl/extractor/giantbomb.py | 172 | 2655 | from __future__ import unicode_literals
import re
import json
from .common import InfoExtractor
from ..utils import (
unescapeHTML,
qualities,
int_or_none,
)
class GiantBombIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?giantbomb\.com/videos/(?P<display_id>[^/]+)/(?P<id>\d+-\d+)'
_TEST = {
'url': 'http://www.giantbomb.com/videos/quick-look-destiny-the-dark-below/2300-9782/',
'md5': '57badeface303ecf6b98b812de1b9018',
'info_dict': {
'id': '2300-9782',
'display_id': 'quick-look-destiny-the-dark-below',
'ext': 'mp4',
'title': 'Quick Look: Destiny: The Dark Below',
'description': 'md5:0aa3aaf2772a41b91d44c63f30dfad24',
'duration': 2399,
'thumbnail': 're:^https?://.*\.jpg$',
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
display_id = mobj.group('display_id')
webpage = self._download_webpage(url, display_id)
title = self._og_search_title(webpage)
description = self._og_search_description(webpage)
thumbnail = self._og_search_thumbnail(webpage)
video = json.loads(unescapeHTML(self._search_regex(
r'data-video="([^"]+)"', webpage, 'data-video')))
duration = int_or_none(video.get('lengthSeconds'))
quality = qualities([
'f4m_low', 'progressive_low', 'f4m_high',
'progressive_high', 'f4m_hd', 'progressive_hd'])
formats = []
for format_id, video_url in video['videoStreams'].items():
if format_id == 'f4m_stream':
continue
if video_url.endswith('.f4m'):
f4m_formats = self._extract_f4m_formats(video_url + '?hdcore=3.3.1', display_id)
if f4m_formats:
f4m_formats[0]['quality'] = quality(format_id)
formats.extend(f4m_formats)
else:
formats.append({
'url': video_url,
'format_id': format_id,
'quality': quality(format_id),
})
if not formats:
youtube_id = video.get('youtubeID')
if youtube_id:
return self.url_result(youtube_id, 'Youtube')
self._sort_formats(formats)
return {
'id': video_id,
'display_id': display_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'duration': duration,
'formats': formats,
}
| unlicense |
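A hedged sketch of exercising the GiantBomb extractor above through youtube-dl's Python API instead of the CLI; the options dict is illustrative and the URL is taken from the extractor's own _TEST entry:

import youtube_dl

with youtube_dl.YoutubeDL({'quiet': True}) as ydl:
    info = ydl.extract_info(
        'http://www.giantbomb.com/videos/quick-look-destiny-the-dark-below/2300-9782/',
        download=False)
    print(info['id'], info['duration'])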
bergercookie/Pump3000 | for_keep/final_code/classes_used.py | 1 | 8083 | # Thu May 29 02:59:41 EEST 2014, nickkouk
# proper division for python 2.7
from __future__ import division
# Usual importing stuff for PySide
from PySide.QtGui import *
from PySide.QtCore import *
# Module imports
import sys
import os
from serial.tools.list_ports import comports
# Qt-Designer compiled python code
import python_gui
import python_settings
import history_settings
import device_configuration
import parameters_change
import about_dialog
class ParametersChange(QDialog, parameters_change.Ui_Dialog):
def __init__(self, pump, parent = None):
super(ParametersChange, self).__init__(parent)
self.setupUi(self)
self.pump = pump
def update_pump_param(self):
pairs = [('top_velocity', self.Top_Velocity_Edit_2.text()),
('cutoff_velocity', self.Cutoff_Velocity_Edit_2.text()),
('backlash', self.Backlash_Steps_Edit_2.text()),
('start_velocity', self.Start_Velocity_Edit_2.text()),
('slope', self.SlopeEdit.text())
]
for pair in pairs:
if pair[1].isdigit():
print "Pair[1] {}".format(pair[1])
self.pump.property_set(*pair)
class HistoryDialog(QDialog, history_settings.Ui_Dialog):
def __init__(self, pump, parent = None):
super(HistoryDialog, self).__init__(parent)
self.setupUi(self)
self.pump = pump
self.__appname__ = "Command History"
self.setWindowTitle(self.__appname__)
self.connect(self.refresh_Btn,\
SIGNAL("clicked()"),\
self.refresh)
self.connect(self.commands_only,\
SIGNAL("clicked()"),\
self.refresh)
self.connect(self.clear_history_btn,\
SIGNAL("clicked()"),\
self.clear_history)
def clear_history(self):
self.pump.history = [ ]
self.refresh()
def refresh(self):
wanted = self.pump.history
wanted_string = ''
if self.commands_only.isChecked():
for i in wanted:
wanted_string += "{}\\r\n".format(i[:-1])
else:
for i in range(len(wanted)):
wanted_string += "{0}:\t {1}\\r\n".format(i+1, wanted[i][:-1])
self.history_edit.setPlainText(wanted_string)
class ReportsDialog(QDialog, python_settings.Ui_Dialog):
def __init__(self, pump, window, parent=None):
super(ReportsDialog, self).__init__(parent)
self.setupUi(self)
self.__appname__ = "Reports Screen"
self.setWindowTitle(self.__appname__)
self.pump = pump
self.window = window
self.connect(self.refresh_interval_edit,\
SIGNAL("textEdited(QString)"),\
self.enable_button)
self.connect(self.refresh_now_button,\
SIGNAL("clicked()"),\
self.refresh)
self.refresh_interval_edit.setText("%s"\
% int((self.window.refreshQtimer.interval() / 1000)))
# Setting the refresh interval manually
self.connect(self.refresh_interval_button,\
SIGNAL("clicked()"),\
self.setRefreshTime)
# Enabling the volume button
def enable_button(self):
if self.refresh_interval_edit.text().isdigit():
self.refresh_interval_button.setEnabled(True)
else:
self.refresh_interval_button.setEnabled(False)
def refresh(self):
""" The refresh function shows the pump major statistics.
The refresh function is periodically run using the QTimer refreshQtimer
When the timer timeouts the stats are fetched from the pump
"""
self.window.update_pump_values()
stats = self.pump.status
self.Actual_Position_Edit.setText(stats["actual_pos"])
self.Backlash_Steps_Edit.setText(stats["backlash_steps"])
self.Cutoff_Velocity_Edit.setText(stats["cutoff_vel"])
self.Position_Edit.setText(stats["absolute_pos"])
self.Start_Velocity_Edit.setText(stats["starting_vel"])
self.Top_Velocity_Edit.setText(stats["top_vel"])
self.Checksum_Edit.setText(stats["checksum"])
self.Fluid_Sensor_Edit.setText(stats["fluid_sensor"])
self.Buffer_Status_Edit.setText(stats["buffer_status"])
self.Version_Edit.setText(stats["version"])
def setRefreshTime(self):
text = self.refresh_interval_edit.text()
if text.isdigit():
self.window.refreshQtimer.setInterval(\
eval(text) * 1000)
self.refresh_interval_edit.setText("%s"\
% int((self.window.refreshQtimer.interval() / 1000)))
print "Timer interval Set: {} microseconds".format(eval(text) * 1000)
else:
QMessageBox.warning(self, self.__appname__, "Not a valid input")
self.refresh_interval_edit.selectAll()
def tick_refresh(self):
if self.noRefresh.isChecked():
self.window.cancel_timer()
self.window.refresh_status = False
self.window.scene.setForegroundBrush(\
QBrush(Qt.lightGray, Qt.CrossPattern))
else:
self.window.refresh_status = True
self.window.refreshQtimer.start()
self.window.scene.setForegroundBrush(\
Qt.NoBrush)
class NewDevDialog(QDialog, device_configuration.Ui_Dialog):
def __init__(self, pump, parent=None):
super(NewDevDialog, self).__init__(parent)
self.setupUi(self)
self.__appname__ = "Device Configuration"
self.setWindowTitle(self.__appname__)
self.comports = comports
self.pump = pump
ports_available = list(self.comports())
self.listWidget.addItem('loop://')
for i in range(len(ports_available)):
self.listWidget.addItem(ports_available[i][0])
def connect_with_port(self):
"""Passes the selected item into the connect_new method of the pump."""
port = self.listWidget.currentItem().text()
self.pump.connect_new(port)
class SyringePickDialog(QDialog, device_configuration.Ui_Dialog):
def __init__(self, pump, parent=None):
super(SyringePickDialog, self).__init__(parent)
self.setupUi(self)
self.__appname__ = "Syringe Configuration"
self.setWindowTitle(self.__appname__)
self.pump = pump
syringe_sizes = ['50 micro', '100 micro', '250 micro', '500 micro', \
'1000 micro', '5000 micro']
for i in range(len(syringe_sizes)):
self.listWidget.addItem(syringe_sizes[i])
def select_new_syringe(self):
"""Passes the selected item into the connect_new method of the pump."""
syringe = self.listWidget.currentItem().text().split()[0]
self.pump.syringe_size = syringe
print "syringe is set to {size}".format(size = self.pump.syringe_size)
class AboutDialog(QDialog, about_dialog.Ui_Form):
def __init__(self, text, appname, parent = None):
super(AboutDialog, self).__init__(parent)
self.setupUi(self)
self.__appname__ = appname
self.setWindowTitle(self.__appname__)
self.text = text
self.load_text()
def load_text(self):
self.textBrowser.setText(self.text)
class AboutDialog2(QDialog, about_dialog.Ui_Form):
def __init__(self, text, appname, parent = None):
super(AboutDialog2, self).__init__(parent)
self.setupUi(self)
self.__appname__ = appname
self.setWindowTitle(self.__appname__)
self.text = text
self.load_text()
self.QtButton = QPushButton("About Qt")
self.horizontalLayout.addWidget(self.QtButton)
self.connect(self.QtButton,\
SIGNAL("clicked()"),\
self.about_qt)
def load_text(self):
self.textBrowser.setText(self.text)
def about_qt(self):
QMessageBox.aboutQt(self, title = "About Qt")
| bsd-2-clause |
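A minimal, hypothetical wiring sketch for the dialogs above; pump stands in for the Pump3000 controller object they expect (something exposing .history, .status and the property methods used in the class bodies) and is not constructed here:

import sys
app = QApplication(sys.argv)
pump = None  # placeholder for the pump controller created elsewhere in Pump3000
dialog = HistoryDialog(pump)
dialog.show()
app.exec_()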
mheap/ansible | lib/ansible/modules/network/vyos/vyos_logging.py | 38 | 7851 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
#
# This file is part of Ansible by Red Hat
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: vyos_logging
version_added: "2.4"
author: "Trishna Guha (@trishnaguha)"
short_description: Manage logging on network devices
description:
- This module provides declarative management of logging
on Vyatta Vyos devices.
notes:
- Tested against VYOS 1.1.7
options:
dest:
description:
- Destination of the logs.
choices: ['console', 'file', 'global', 'host', 'user']
name:
description:
- If value of C(dest) is I(file) it indicates file-name,
for I(user) it indicates username and for I(host) indicates
the host name to be notified.
facility:
description:
- Set logging facility.
level:
description:
- Set logging severity levels.
aggregate:
description: List of logging definitions.
state:
description:
- State of the logging configuration.
default: present
choices: ['present', 'absent']
extends_documentation_fragment: vyos
"""
EXAMPLES = """
- name: configure console logging
vyos_logging:
dest: console
facility: all
level: crit
- name: remove console logging configuration
vyos_logging:
dest: console
state: absent
- name: configure file logging
vyos_logging:
dest: file
name: test
facility: local3
level: err
- name: Add logging aggregate
vyos_logging:
aggregate:
- { dest: file, name: test1, facility: all, level: info }
- { dest: file, name: test2, facility: news, level: debug }
state: present
- name: Remove logging aggregate
vyos_logging:
aggregate:
- { dest: console, facility: all, level: info }
- { dest: console, facility: daemon, level: warning }
- { dest: file, name: test2, facility: news, level: debug }
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- set system syslog global facility all level notice
"""
import re
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.utils import remove_default_spec
from ansible.module_utils.network.vyos.vyos import get_config, load_config
from ansible.module_utils.network.vyos.vyos import vyos_argument_spec
def spec_to_commands(updates, module):
commands = list()
want, have = updates
for w in want:
dest = w['dest']
name = w['name']
facility = w['facility']
level = w['level']
state = w['state']
del w['state']
if state == 'absent' and w in have:
if w['name']:
commands.append('delete system syslog {0} {1} facility {2} level {3}'.format(
dest, name, facility, level))
else:
commands.append('delete system syslog {0} facility {1} level {2}'.format(
dest, facility, level))
elif state == 'present' and w not in have:
if w['name']:
commands.append('set system syslog {0} {1} facility {2} level {3}'.format(
dest, name, facility, level))
else:
commands.append('set system syslog {0} facility {1} level {2}'.format(
dest, facility, level))
return commands
def config_to_dict(module):
data = get_config(module)
obj = []
for line in data.split('\n'):
if line.startswith('set system syslog'):
match = re.search(r'set system syslog (\S+)', line, re.M)
dest = match.group(1)
if dest == 'host':
match = re.search(r'host (\S+)', line, re.M)
name = match.group(1)
elif dest == 'file':
match = re.search(r'file (\S+)', line, re.M)
name = match.group(1)
elif dest == 'user':
match = re.search(r'user (\S+)', line, re.M)
name = match.group(1)
else:
name = None
if 'facility' in line:
match = re.search(r'facility (\S+)', line, re.M)
facility = match.group(1)
if 'level' in line:
match = re.search(r'level (\S+)', line, re.M)
level = match.group(1).strip("'")
obj.append({'dest': dest,
'name': name,
'facility': facility,
'level': level})
return obj
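# Illustrative note (added): config_to_dict turns each "set system syslog ..." line
# of the running configuration into a dict. For example, a hypothetical line
#     set system syslog host 1.2.3.4 facility all level 'notice'
# is parsed into
#     {'dest': 'host', 'name': '1.2.3.4', 'facility': 'all', 'level': 'notice'}
# (the quotes around the level are stripped), which spec_to_commands then
# compares against the desired state.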
def map_params_to_obj(module, required_if=None):
obj = []
aggregate = module.params.get('aggregate')
if aggregate:
for item in aggregate:
for key in item:
if item.get(key) is None:
item[key] = module.params[key]
module._check_required_if(required_if, item)
obj.append(item.copy())
else:
if module.params['dest'] not in ('host', 'file', 'user'):
module.params['name'] = None
obj.append({
'dest': module.params['dest'],
'name': module.params['name'],
'facility': module.params['facility'],
'level': module.params['level'],
'state': module.params['state']
})
return obj
def main():
""" main entry point for module execution
"""
element_spec = dict(
dest=dict(type='str', choices=['console', 'file', 'global', 'host', 'user']),
name=dict(type='str'),
facility=dict(type='str'),
level=dict(type='str'),
state=dict(default='present', choices=['present', 'absent']),
)
aggregate_spec = deepcopy(element_spec)
# remove default in aggregate spec, to handle common arguments
remove_default_spec(aggregate_spec)
argument_spec = dict(
aggregate=dict(type='list', elements='dict', options=aggregate_spec),
)
argument_spec.update(element_spec)
argument_spec.update(vyos_argument_spec)
required_if = [('dest', 'host', ['name', 'facility', 'level']),
('dest', 'file', ['name', 'facility', 'level']),
('dest', 'user', ['name', 'facility', 'level']),
('dest', 'console', ['facility', 'level']),
('dest', 'global', ['facility', 'level'])]
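    # Note (added): each required_if entry reads "when dest equals this value, the
    # listed parameters must also be provided", e.g. dest=host additionally
    # requires name, facility and level.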
module = AnsibleModule(argument_spec=argument_spec,
required_if=required_if,
supports_check_mode=True)
warnings = list()
result = {'changed': False}
if warnings:
result['warnings'] = warnings
want = map_params_to_obj(module, required_if=required_if)
have = config_to_dict(module)
commands = spec_to_commands((want, have), module)
result['commands'] = commands
if commands:
commit = not module.check_mode
load_config(module, commands, commit=commit)
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
AuthorizeNet/sample-code-python | CustomerProfiles/get-customer-shipping-address.py | 1 | 2245 | import os, sys
import imp
from authorizenet import apicontractsv1
from authorizenet.apicontrollers import *
constants = imp.load_source('modulename', 'constants.py')
def get_customer_shipping_address(customerProfileId, customerAddressId):
# Give merchant details
merchantAuth = apicontractsv1.merchantAuthenticationType()
merchantAuth.name = constants.apiLoginId
merchantAuth.transactionKey = constants.transactionKey
# create get shipping address request
getShippingAddress = apicontractsv1.getCustomerShippingAddressRequest()
getShippingAddress.merchantAuthentication = merchantAuth
getShippingAddress.customerProfileId = customerProfileId
getShippingAddress.customerAddressId = customerAddressId
# Make the API call
getShippingAddressController = getCustomerShippingAddressController(getShippingAddress)
getShippingAddressController.execute()
response = getShippingAddressController.getresponse()
if response.messages.resultCode == "Ok":
print ("SUCCESS")
if hasattr(response, 'address') == True:
print ("The address is")
print (response.address.firstName +" " + response.address.lastName)
print (response.address.address)
print (response.address.city)
print (response.address.state)
print (response.address.zip)
print (response.address.country)
if not hasattr(response, 'subscriptionIds'):
print ("no subscriptionIds attr in response")
else:
if hasattr(response, 'subscriptionIds') == True:
if hasattr(response.subscriptionIds, 'subscriptionId') == True:
print ("list of subscriptionid:")
for subscriptionid in (response.subscriptionIds.subscriptionId):
print (subscriptionid)
else:
print ("ERROR")
print ("Message code : %s " % response.messages.message[0]['code'].text)
print ("Message text : %s " % response.messages.message[0]['text'].text)
return response
if(os.path.basename(__file__) == os.path.basename(sys.argv[0])):
get_customer_shipping_address(constants.customerProfileId, constants.customerProfileShippingId)
| mit |
gkoelln/youtube-dl | youtube_dl/extractor/liveleak.py | 6 | 7020 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import int_or_none
class LiveLeakIE(InfoExtractor):
_VALID_URL = r'https?://(?:\w+\.)?liveleak\.com/view\?.*?\b[it]=(?P<id>[\w_]+)'
_TESTS = [{
'url': 'http://www.liveleak.com/view?i=757_1364311680',
'md5': '0813c2430bea7a46bf13acf3406992f4',
'info_dict': {
'id': '757_1364311680',
'ext': 'mp4',
'description': 'extremely bad day for this guy..!',
'uploader': 'ljfriel2',
'title': 'Most unlucky car accident',
'thumbnail': r're:^https?://.*\.jpg$'
}
}, {
'url': 'http://www.liveleak.com/view?i=f93_1390833151',
'md5': 'd3f1367d14cc3c15bf24fbfbe04b9abf',
'info_dict': {
'id': 'f93_1390833151',
'ext': 'mp4',
'description': 'German Television Channel NDR does an exclusive interview with Edward Snowden.\r\nUploaded on LiveLeak cause German Television thinks the rest of the world isn\'t intereseted in Edward Snowden.',
'uploader': 'ARD_Stinkt',
'title': 'German Television does first Edward Snowden Interview (ENGLISH)',
'thumbnail': r're:^https?://.*\.jpg$'
}
}, {
# Prochan embed
'url': 'http://www.liveleak.com/view?i=4f7_1392687779',
'md5': '42c6d97d54f1db107958760788c5f48f',
'info_dict': {
'id': '4f7_1392687779',
'ext': 'mp4',
'description': "The guy with the cigarette seems amazingly nonchalant about the whole thing... I really hope my friends' reactions would be a bit stronger.\r\n\r\nAction-go to 0:55.",
'uploader': 'CapObveus',
'title': 'Man is Fatally Struck by Reckless Car While Packing up a Moving Truck',
'age_limit': 18,
},
'skip': 'Video is dead',
}, {
# Covers https://github.com/rg3/youtube-dl/pull/5983
# Multiple resolutions
'url': 'http://www.liveleak.com/view?i=801_1409392012',
'md5': 'c3a449dbaca5c0d1825caecd52a57d7b',
'info_dict': {
'id': '801_1409392012',
'ext': 'mp4',
'description': 'Happened on 27.7.2014. \r\nAt 0:53 you can see people still swimming at near beach.',
'uploader': 'bony333',
'title': 'Crazy Hungarian tourist films close call waterspout in Croatia',
'thumbnail': r're:^https?://.*\.jpg$'
}
}, {
# Covers https://github.com/rg3/youtube-dl/pull/10664#issuecomment-247439521
'url': 'http://m.liveleak.com/view?i=763_1473349649',
'add_ie': ['Youtube'],
'info_dict': {
'id': '763_1473349649',
'ext': 'mp4',
'title': 'Reporters and public officials ignore epidemic of black on asian violence in Sacramento | Colin Flaherty',
'description': 'Colin being the warrior he is and showing the injustice Asians in Sacramento are being subjected to.',
'uploader': 'Ziz',
'upload_date': '20160908',
'uploader_id': 'UCEbta5E_jqlZmEJsriTEtnw'
},
'params': {
'skip_download': True,
},
}, {
'url': 'https://www.liveleak.com/view?i=677_1439397581',
'info_dict': {
'id': '677_1439397581',
'title': 'Fuel Depot in China Explosion caught on video',
},
'playlist_count': 3,
}, {
'url': 'https://www.liveleak.com/view?t=HvHi_1523016227',
'only_matching': True,
}]
@staticmethod
def _extract_urls(webpage):
return re.findall(
r'<iframe[^>]+src="(https?://(?:\w+\.)?liveleak\.com/ll_embed\?[^"]*[if]=[\w_]+[^"]+)"',
webpage)
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_title = self._og_search_title(webpage).replace('LiveLeak.com -', '').strip()
video_description = self._og_search_description(webpage)
video_uploader = self._html_search_regex(
r'By:.*?(\w+)</a>', webpage, 'uploader', fatal=False)
age_limit = int_or_none(self._search_regex(
r'you confirm that you are ([0-9]+) years and over.',
webpage, 'age limit', default=None))
video_thumbnail = self._og_search_thumbnail(webpage)
entries = self._parse_html5_media_entries(url, webpage, video_id)
if not entries:
# Maybe an embed?
embed_url = self._search_regex(
r'<iframe[^>]+src="((?:https?:)?//(?:www\.)?(?:prochan|youtube)\.com/embed[^"]+)"',
webpage, 'embed URL')
return {
'_type': 'url_transparent',
'url': embed_url,
'id': video_id,
'title': video_title,
'description': video_description,
'uploader': video_uploader,
'age_limit': age_limit,
}
for idx, info_dict in enumerate(entries):
for a_format in info_dict['formats']:
if not a_format.get('height'):
a_format['height'] = int_or_none(self._search_regex(
r'([0-9]+)p\.mp4', a_format['url'], 'height label',
default=None))
self._sort_formats(info_dict['formats'])
# Don't append entry ID for one-video pages to keep backward compatibility
if len(entries) > 1:
info_dict['id'] = '%s_%s' % (video_id, idx + 1)
else:
info_dict['id'] = video_id
info_dict.update({
'title': video_title,
'description': video_description,
'uploader': video_uploader,
'age_limit': age_limit,
'thumbnail': video_thumbnail,
})
return self.playlist_result(entries, video_id, video_title)
class LiveLeakEmbedIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?liveleak\.com/ll_embed\?.*?\b(?P<kind>[if])=(?P<id>[\w_]+)'
# See generic.py for actual test cases
_TESTS = [{
'url': 'https://www.liveleak.com/ll_embed?i=874_1459135191',
'only_matching': True,
}, {
'url': 'https://www.liveleak.com/ll_embed?f=ab065df993c1',
'only_matching': True,
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
kind, video_id = mobj.group('kind', 'id')
if kind == 'f':
webpage = self._download_webpage(url, video_id)
liveleak_url = self._search_regex(
r'logourl\s*:\s*(?P<q1>[\'"])(?P<url>%s)(?P=q1)' % LiveLeakIE._VALID_URL,
webpage, 'LiveLeak URL', group='url')
elif kind == 'i':
liveleak_url = 'http://www.liveleak.com/view?i=%s' % video_id
return self.url_result(liveleak_url, ie=LiveLeakIE.ie_key())
| unlicense |
drgarcia1986/cookiecutter | cookiecutter/prompt.py | 18 | 3937 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.prompt
---------------------
Functions for prompting the user for project info.
"""
from __future__ import unicode_literals
from collections import OrderedDict
import click
from .compat import iteritems, is_string
from jinja2.environment import Environment
def read_user_variable(var_name, default_value):
"""Prompt the user for the given variable and return the entered value
or the given default.
:param str var_name: Variable of the context to query the user
:param default_value: Value that will be returned if no input happens
"""
# Please see http://click.pocoo.org/4/api/#click.prompt
return click.prompt(var_name, default=default_value)
def read_user_yes_no(question, default_value):
"""Prompt the user to reply with 'yes' or 'no' (or equivalent values).
Note:
Possible choices are 'true', '1', 'yes', 'y' or 'false', '0', 'no', 'n'
:param str question: Question to the user
:param default_value: Value that will be returned if no input happens
"""
# Please see http://click.pocoo.org/4/api/#click.prompt
return click.prompt(
question,
default=default_value,
type=click.BOOL
)
def read_user_choice(var_name, options):
"""Prompt the user to choose from several options for the given variable.
The first item will be returned if no input happens.
:param str var_name: Variable as specified in the context
:param list options: Sequence of options that are available to select from
:return: Exactly one item of ``options`` that has been chosen by the user
"""
# Please see http://click.pocoo.org/4/api/#click.prompt
if not isinstance(options, list):
raise TypeError
if not options:
raise ValueError
choice_map = OrderedDict(
('{}'.format(i), value) for i, value in enumerate(options, 1)
)
choices = choice_map.keys()
default = '1'
choice_lines = ['{} - {}'.format(*c) for c in choice_map.items()]
prompt = '\n'.join((
'Select {}:'.format(var_name),
'\n'.join(choice_lines),
'Choose from {}'.format(', '.join(choices))
))
user_choice = click.prompt(
prompt, type=click.Choice(choices), default=default
)
return choice_map[user_choice]
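# Illustrative sketch (added, not part of the original module): a hypothetical call
#     read_user_choice('license', ['MIT', 'BSD-3', 'GPLv3'])
# renders roughly the following click prompt
#     Select license:
#     1 - MIT
#     2 - BSD-3
#     3 - GPLv3
#     Choose from 1, 2, 3 [1]:
# and returns the chosen option ('MIT' if the user simply hits Enter, because
# '1' is the default).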
def render_variable(env, raw, cookiecutter_dict):
if not is_string(raw):
raw = str(raw)
template = env.from_string(raw)
rendered_template = template.render(cookiecutter=cookiecutter_dict)
return rendered_template
def prompt_choice_for_config(cookiecutter_dict, env, key, options, no_input):
"""Prompt the user which option to choose from the given. Each of the
possible choices is rendered beforehand.
"""
rendered_options = [
render_variable(env, raw, cookiecutter_dict) for raw in options
]
if no_input:
return rendered_options[0]
return read_user_choice(key, rendered_options)
def prompt_for_config(context, no_input=False):
"""
Prompts the user to enter new config, using context as a source for the
field names and sample values.
:param no_input: Prompt the user at command line for manual configuration?
"""
cookiecutter_dict = {}
env = Environment()
for key, raw in iteritems(context['cookiecutter']):
if key.startswith('_'):
cookiecutter_dict[key] = raw
continue
if isinstance(raw, list):
# We are dealing with a choice variable
val = prompt_choice_for_config(
cookiecutter_dict, env, key, raw, no_input
)
else:
# We are dealing with a regular variable
val = render_variable(env, raw, cookiecutter_dict)
if not no_input:
val = read_user_variable(key, val)
cookiecutter_dict[key] = val
return cookiecutter_dict
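# Usage sketch (added for illustration; the context values are hypothetical):
#     context = {'cookiecutter': {'project_name': 'My Project',
#                                 'license': ['MIT', 'BSD-3'],
#                                 '_private_key': 'passed through untouched'}}
#     prompt_for_config(context)
# asks for 'project_name' with 'My Project' as the default, presents 'license'
# as a numbered choice, and copies the underscore-prefixed key through without
# prompting; with no_input=True the defaults (and the first choice) are used.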
| bsd-3-clause |
daveisadork/PyDevNS | tests/test_config.py | 1 | 2071 | import pytest
from six import iteritems
def test_config_defaults(config):
for key, value in iteritems(config.DEFAULTS):
assert getattr(config, key) == value
@pytest.mark.parametrize("address", ("127.0.0.1", "10.10.10.10"))
def test_config_address(config, address):
config.address = address
assert config.address == address
@pytest.mark.parametrize(
"domains, expected",
[
(["local.dev"], ("local.dev", )),
(["local.dev", "local.co"], ("local.dev", "local.co")),
(("local.dev", "local.co"), ("local.dev", "local.co")),
("local.dev", ("local.dev", )),
]
)
def test_config_domains(config, domains, expected):
config.domains = domains
assert config.domains == expected
@pytest.mark.parametrize("host", ("127.0.0.1", "10.10.10.10"))
def test_config_host(config, host):
config.host = host
assert config.host == host
@pytest.mark.parametrize("level", range(10, 60, 10))
def test_config_log_level(logger, config, level):
config.log_level = level
assert config.log_level == level
assert logger.getEffectiveLevel() == level
@pytest.mark.parametrize("port", (0, 53, 53535))
def test_config_port(config, port):
config.port = port
assert config.port == port
@pytest.mark.parametrize("ttl", (60, 600, 3600))
def test_config_ttl(config, ttl):
config.ttl = ttl
assert config.ttl == ttl
@pytest.mark.parametrize("resolver", (True, False))
def test_config_resolver(config, resolver):
config.resolver = resolver
assert config.resolver == resolver
@pytest.mark.parametrize("resolver_dir", [
"/usr/local/etc/resolver",
"~/.config/resolver",
])
def test_parse_args_resolver_dir(config, resolver_dir):
config.resolver_dir = resolver_dir
assert config.resolver_dir == resolver_dir
@pytest.mark.parametrize(
"verbosity, level", [(0, 40), (1, 30), (2, 20), (3, 10)]
)
def test_config_verbosity(logger, config, verbosity, level):
config.verbosity = verbosity
assert config.log_level == level
assert config.verbosity == verbosity
| mit |
museomix/2013_Quebec_thermoscope | raspberry/pygame-1.9.1release/examples/pixelarray.py | 9 | 3026 | #!/usr/bin/env python
import os, pygame
from pygame.compat import xrange_
main_dir = os.path.split(os.path.abspath(__file__))[0]
data_dir = os.path.join(main_dir, 'data')
def show (image):
screen = pygame.display.get_surface()
screen.fill ((255, 255, 255))
screen.blit (image, (0, 0))
pygame.display.flip ()
while 1:
event = pygame.event.wait ()
if event.type == pygame.QUIT:
raise SystemExit
if event.type == pygame.MOUSEBUTTONDOWN:
break
def main():
pygame.init ()
pygame.display.set_mode ((255, 255))
surface = pygame.Surface ((255, 255))
pygame.display.flip ()
# Create the PixelArray.
ar = pygame.PixelArray (surface)
r, g, b = 0, 0, 0
# Do some easy gradient effect.
for y in xrange_ (255):
r, g, b = y, y, y
ar[:,y] = (r, g, b)
del ar
show (surface)
# We have made some gradient effect, now flip it.
ar = pygame.PixelArray (surface)
ar[:] = ar[:,::-1]
del ar
show (surface)
# Every second column will be made blue
ar = pygame.PixelArray (surface)
ar[::2] = (0, 0, 255)
del ar
show (surface)
# Every second row will be made green
ar = pygame.PixelArray (surface)
ar[:,::2] = (0, 255, 0)
del ar
show (surface)
# Manipulate the image. Flip it around the y axis.
surface = pygame.image.load (os.path.join (data_dir, 'arraydemo.bmp'))
ar = pygame.PixelArray (surface)
ar[:] = ar[:,::-1]
del ar
show (surface)
# Flip the image around the x axis.
ar = pygame.PixelArray (surface)
ar[:] = ar[::-1,:]
del ar
show (surface)
# Every second column will be made white.
ar = pygame.PixelArray (surface)
ar[::2] = (255, 255, 255)
del ar
show (surface)
    # Flip the image around both axes, restoring its original layout.
ar = pygame.PixelArray (surface)
ar[:] = ar[::-1,::-1]
del ar
show (surface)
    # Scale it by throwing away every second pixel.
surface = pygame.image.load (os.path.join (data_dir, 'arraydemo.bmp'))
ar = pygame.PixelArray (surface)
sf2 = ar[::2,::2].make_surface ()
del ar
show (sf2)
# Replace anything looking like the blue color from the text.
ar = pygame.PixelArray (surface)
ar.replace ((60, 60, 255), (0, 255, 0), 0.06)
del ar
show (surface)
# Extract anything which might be somewhat black.
surface = pygame.image.load (os.path.join (data_dir, 'arraydemo.bmp'))
ar = pygame.PixelArray (surface)
ar2 = ar.extract ((0, 0, 0), 0.07)
sf2 = ar2.surface
del ar, ar2
show (sf2)
# Compare two images.
surface = pygame.image.load (os.path.join (data_dir, 'alien1.gif'))
surface2 = pygame.image.load (os.path.join (data_dir, 'alien2.gif'))
ar1 = pygame.PixelArray (surface)
ar2 = pygame.PixelArray (surface2)
ar3 = ar1.compare (ar2, 0.07)
sf3 = ar3.surface
del ar1, ar2, ar3
show (sf3)
if __name__ == '__main__':
main()
| mit |
brome-hq/brome | example/configs/test_dict.py | 2 | 1064 | test_dict = {
    '#1': 'The header appears after the page is loaded',
    '#2': 'The class "completed" appears when a todo is checked',
    '#3': 'The class "completed" does not appear when a todo is unchecked',
    '#4': 'The todo reacts well to multiple checks and unchecks',
    '#5': 'The todo is removed after we click on the delete button',
    '#6': 'No todo is left after we delete all of them',
    '#7': 'All the completed todos are removed after we click on clear completed; the uncompleted todos remain visible.',  # noqa
    '#8': 'Adding a todo increments the todo count',
    '#9': 'Completing a todo decrements the todo count',
    '#10': 'Deleting a todo decrements the todo count',
    '#11': 'Unchecking a todo increments the todo count',
    '#12': 'After toggling all todos, all the todos are either completed or uncompleted',  # noqa
    '#13': '"Active" filter shows only uncompleted todos',
    '#14': '"Completed" filter shows only completed todos',
    '#15': '"All" filter shows all todos',
    '#16': '"All" filter shows all todos'
}
| isc |
shashank971/edx-platform | openedx/core/djangoapps/credit/tests/test_partition.py | 76 | 7030 | # -*- coding: utf-8 -*-
"""
Tests for In-Course Reverification Access Control Partition scheme
"""
import ddt
import unittest
from django.conf import settings
from lms.djangoapps.verify_student.models import (
VerificationCheckpoint,
VerificationStatus,
SkippedReverification,
)
from openedx.core.djangoapps.credit.partition_schemes import VerificationPartitionScheme
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
from xmodule.partitions.partitions import UserPartition, Group
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@ddt.ddt
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class ReverificationPartitionTest(ModuleStoreTestCase):
"""Tests for the Reverification Partition Scheme. """
SUBMITTED = "submitted"
APPROVED = "approved"
DENIED = "denied"
def setUp(self):
super(ReverificationPartitionTest, self).setUp()
# creating course, checkpoint location and user partition mock object.
self.course = CourseFactory.create()
self.checkpoint_location = u'i4x://{org}/{course}/edx-reverification-block/first_uuid'.format(
org=self.course.id.org, course=self.course.id.course
)
scheme = UserPartition.get_scheme("verification")
self.user_partition = UserPartition(
id=0,
name=u"Verification Checkpoint",
description=u"Verification Checkpoint",
scheme=scheme,
parameters={"location": self.checkpoint_location},
groups=[
Group(scheme.ALLOW, "Allow access to content"),
Group(scheme.DENY, "Deny access to content"),
]
)
self.first_checkpoint = VerificationCheckpoint.objects.create(
course_id=self.course.id,
checkpoint_location=self.checkpoint_location
)
def create_user_and_enroll(self, enrollment_type):
"""Create and enroll users with provided enrollment type."""
user = UserFactory.create()
CourseEnrollment.objects.create(
user=user,
course_id=self.course.id,
mode=enrollment_type,
is_active=True
)
return user
def add_verification_status(self, user, status):
"""Adding the verification status for a user."""
VerificationStatus.add_status_from_checkpoints(
checkpoints=[self.first_checkpoint],
user=user,
status=status
)
@ddt.data(
("verified", SUBMITTED, VerificationPartitionScheme.ALLOW),
("verified", APPROVED, VerificationPartitionScheme.ALLOW),
("verified", DENIED, VerificationPartitionScheme.ALLOW),
("verified", None, VerificationPartitionScheme.DENY),
("honor", None, VerificationPartitionScheme.ALLOW),
)
@ddt.unpack
def test_get_group_for_user(self, enrollment_type, verification_status, expected_group):
# creating user and enroll them.
user = self.create_user_and_enroll(enrollment_type)
if verification_status:
self.add_verification_status(user, verification_status)
self._assert_group_assignment(user, expected_group)
def test_get_group_for_user_with_skipped(self):
# Check that a user is in verified allow group if that user has skipped
# any ICRV block.
user = self.create_user_and_enroll('verified')
SkippedReverification.add_skipped_reverification_attempt(
checkpoint=self.first_checkpoint,
user_id=user.id,
course_id=self.course.id
)
self._assert_group_assignment(user, VerificationPartitionScheme.ALLOW)
def test_cache_with_skipped_icrv(self):
# Check that a user is in verified allow group if that user has skipped
# any ICRV block.
user = self.create_user_and_enroll('verified')
SkippedReverification.add_skipped_reverification_attempt(
checkpoint=self.first_checkpoint,
user_id=user.id,
course_id=self.course.id
)
# this will warm the cache.
with self.assertNumQueries(3):
self._assert_group_assignment(user, VerificationPartitionScheme.ALLOW)
# no db queries this time.
with self.assertNumQueries(0):
self._assert_group_assignment(user, VerificationPartitionScheme.ALLOW)
def test_cache_with_submitted_status(self):
# Check that a user is in verified allow group if that user has approved status at
# any ICRV block.
user = self.create_user_and_enroll('verified')
self.add_verification_status(user, VerificationStatus.APPROVED_STATUS)
# this will warm the cache.
with self.assertNumQueries(4):
self._assert_group_assignment(user, VerificationPartitionScheme.ALLOW)
# no db queries this time.
with self.assertNumQueries(0):
self._assert_group_assignment(user, VerificationPartitionScheme.ALLOW)
def test_cache_with_denied_status(self):
# Check that a user is in verified allow group if that user has denied at
# any ICRV block.
user = self.create_user_and_enroll('verified')
self.add_verification_status(user, VerificationStatus.DENIED_STATUS)
# this will warm the cache.
with self.assertNumQueries(4):
self._assert_group_assignment(user, VerificationPartitionScheme.ALLOW)
# no db queries this time.
with self.assertNumQueries(0):
self._assert_group_assignment(user, VerificationPartitionScheme.ALLOW)
def test_cache_with_honor(self):
        # Check that a user enrolled in honor mode is assigned to the allow group.
user = self.create_user_and_enroll('honor')
# this will warm the cache.
with self.assertNumQueries(3):
self._assert_group_assignment(user, VerificationPartitionScheme.ALLOW)
# no db queries this time.
with self.assertNumQueries(0):
self._assert_group_assignment(user, VerificationPartitionScheme.ALLOW)
def test_cache_with_verified_deny_group(self):
        # Check a user in verified mode who has not performed any verification action.
user = self.create_user_and_enroll('verified')
# this will warm the cache.
with self.assertNumQueries(3):
self._assert_group_assignment(user, VerificationPartitionScheme.DENY)
# no db queries this time.
with self.assertNumQueries(0):
self._assert_group_assignment(user, VerificationPartitionScheme.DENY)
def _assert_group_assignment(self, user, expected_group_id):
"""Check that the user was assigned to a group. """
actual_group = VerificationPartitionScheme.get_group_for_user(self.course.id, user, self.user_partition)
self.assertEqual(actual_group.id, expected_group_id)
| agpl-3.0 |
twobraids/socorro | socorro/unittest/processor/test_breakpad_transform_rules.py | 8 | 40638 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import copy
import ujson
from mock import Mock, patch
from nose.tools import eq_, ok_
from contextlib import contextmanager
from configman.dotdict import DotDict as CDotDict
from socorro.unittest.testbase import TestCase
from socorro.lib.util import DotDict
from socorro.processor.breakpad_transform_rules import (
BreakpadStackwalkerRule,
BreakpadStackwalkerRule2015,
CrashingThreadRule,
ExternalProcessRule,
DumpLookupExternalRule,
JitCrashCategorizeRule
)
canonical_standard_raw_crash = DotDict({
"uuid": '00000000-0000-0000-0000-000002140504',
"InstallTime": "1335439892",
"AdapterVendorID": "0x1002",
"TotalVirtualMemory": "4294836224",
"Comments": "why did my browser crash? #fail",
"Theme": "classic/1.0",
"Version": "12.0",
"Email": "[email protected]",
"Vendor": "Mozilla",
"EMCheckCompatibility": "true",
"Throttleable": "1",
"id": "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}",
"buildid": "20120420145725",
"AvailablePageFile": "10641510400",
"version": "12.0",
"AdapterDeviceID": "0x7280",
"ReleaseChannel": "release",
"submitted_timestamp": "2012-05-08T23:26:33.454482+00:00",
"URL": "http://www.mozilla.com",
"timestamp": 1336519593.454627,
"Notes": "AdapterVendorID: 0x1002, AdapterDeviceID: 0x7280, "
"AdapterSubsysID: 01821043, "
"AdapterDriverVersion: 8.593.100.0\nD3D10 Layers? D3D10 "
"Layers- D3D9 Layers? D3D9 Layers- ",
"CrashTime": "1336519554",
"Winsock_LSP": "MSAFD Tcpip [TCP/IPv6] : 2 : 1 : \n "
"MSAFD Tcpip [UDP/IPv6] : 2 : 2 : "
"%SystemRoot%\\system32\\mswsock.dll \n "
"MSAFD Tcpip [RAW/IPv6] : 2 : 3 : \n "
"MSAFD Tcpip [TCP/IP] : 2 : 1 : "
"%SystemRoot%\\system32\\mswsock.dll \n "
"MSAFD Tcpip [UDP/IP] : 2 : 2 : \n "
"MSAFD Tcpip [RAW/IP] : 2 : 3 : "
"%SystemRoot%\\system32\\mswsock.dll \n "
"\u041f\u043e\u0441\u0442\u0430\u0432\u0449\u0438\u043a "
"\u0443\u0441\u043b\u0443\u0433 RSVP TCPv6 : 2 : 1 : \n "
"\u041f\u043e\u0441\u0442\u0430\u0432\u0449\u0438\u043a "
"\u0443\u0441\u043b\u0443\u0433 RSVP TCP : 2 : 1 : "
"%SystemRoot%\\system32\\mswsock.dll \n "
"\u041f\u043e\u0441\u0442\u0430\u0432\u0449\u0438\u043a "
"\u0443\u0441\u043b\u0443\u0433 RSVP UDPv6 : 2 : 2 : \n "
"\u041f\u043e\u0441\u0442\u0430\u0432\u0449\u0438\u043a "
"\u0443\u0441\u043b\u0443\u0433 RSVP UDP : 2 : 2 : "
"%SystemRoot%\\system32\\mswsock.dll",
"FramePoisonBase": "00000000f0de0000",
"AvailablePhysicalMemory": "2227773440",
"FramePoisonSize": "65536",
"StartupTime": "1336499438",
"Add-ons": "[email protected]:0.3,"
"dmpluginff%40westbyte.com:1%2C4.8,"
"[email protected]:1.9.1,"
"[email protected]:2.4,"
"[email protected]:1.0,"
"[email protected]:2.1,"
"{a0d7ccb3-214d-498b-b4aa-0e8fda9a7bf7}:20111107,"
"{d10d0bf8-f5b5-c8b4-a8b2-2b9879e08c5d}:2.0.3,"
"[email protected]:2.4.6.4,"
"{972ce4c6-7e08-4474-a285-3208198ce6fd}:12.0,"
"[email protected]:1.2.1",
"BuildID": "20120420145725",
"SecondsSinceLastCrash": "86985",
"ProductName": "Firefox",
"legacy_processing": 0,
"AvailableVirtualMemory": "3812708352",
"SystemMemoryUsePercentage": "48",
"ProductID": "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}",
"Distributor": "Mozilla",
"Distributor_version": "12.0",
})
cannonical_stackwalker_output = {
u'crash_info': {
u'address': u'0x0',
u'crashing_thread': 0,
u'type': u'EXC_BAD_ACCESS / KERN_INVALID_ADDRESS'
},
u'crashing_thread': {
u'frames': [
{
u'file': u'hg:hg.mozilla.org/releases/mozilla-release:'
'memory/mozjemalloc/jemalloc.c:44234f451065',
u'frame': 0,
u'function': u'arena_malloc',
u'function_offset': u'0x1e3',
u'line': 3067,
u'module': u'libmozglue.dylib',
u'module_offset': u'0x7883',
u'offset': u'0x10000e883',
u'registers': {
u'r10': u'0x0000000000000003',
u'r11': u'0x0000000117fa0400',
u'r12': u'0x0000000000000020',
u'r13': u'0x0000000100200210',
u'r14': u'0x0000000000000000',
u'r15': u'0x0000000100200040',
u'r8': u'0x0000000100200040',
u'r9': u'0x000000000000000e',
u'rax': u'0x0000000100200220',
u'rbp': u'0x0000000000000020',
u'rbx': u'0x0000000000000020',
u'rcx': u'0x0000000000000000',
u'rdi': u'0x0000000100200218',
u'rdx': u'0x0000000000000000',
u'rip': u'0x000000010000e883',
u'rsi': u'0x0000000000000020',
u'rsp': u'0x00007fff5fbfc170'
},
u'trust': u'context'
},
{
u'file': u'hg:hg.mozilla.org/releases/mozilla-release:'
'memory/mozjemalloc/jemalloc.c:44234f451065',
u'frame': 1,
u'function': u'je_realloc',
u'function_offset': u'0x5a1',
u'line': 4752,
u'module': u'libmozglue.dylib',
u'module_offset': u'0x2141',
u'offset': u'0x100009141',
u'trust': u'cfi'
},
{
u'frame': 2,
u'function': u'malloc_zone_realloc',
u'function_offset': u'0x5b',
u'module': u'libSystem.B.dylib',
u'module_offset': u'0x8b7a',
u'offset': u'0x7fff82a27b7a',
u'trust': u'context'
},
{
u'file': u'hg:hg.mozilla.org/releases/mozilla-release'
':memory/mozjemalloc/jemalloc.c:44234f451065',
u'frame': 1,
u'function': u'je_realloc',
u'function_offset': u'0x5a1',
u'line': 4752,
u'module': u'libmozglue.dylib',
u'module_offset': u'0x2141',
u'offset': u'0x100009141',
u'trust': u'cfi'
},
{
u'frame': 2,
u'function': u'malloc_zone_realloc',
u'function_offset': u'0x5b',
u'module': u'libSystem.B.dylib',
u'module_offset': u'0x8b7a',
u'offset': u'0x7fff82a27b7a',
}
],
},
u'status': u'OK',
u'system_info': {
u'cpu_arch': u'amd64',
u'cpu_count': 2,
u'cpu_info': u'family 6 model 23 stepping 10',
u'os': u'Mac OS X',
u'os_ver': u'10.6.8 10K549'
},
u'thread_count': 48,
# ...
}
cannonical_stackwalker_output_str = ujson.dumps(cannonical_stackwalker_output)
#==============================================================================
class MyBreakpadStackwalkerRule(BreakpadStackwalkerRule):
@contextmanager
def _temp_raw_crash_json_file(self, raw_crash, crash_id):
yield "%s.json" % raw_crash.uuid
#==============================================================================
class MyBreakpadStackwalkerRule2015(BreakpadStackwalkerRule2015):
@contextmanager
def _temp_raw_crash_json_file(self, raw_crash, crash_id):
yield "%s.json" % raw_crash.uuid
#==============================================================================
class TestBreakpadTransformRule(TestCase):
#--------------------------------------------------------------------------
def get_basic_config(self):
config = CDotDict()
config.logger = Mock()
config.chatty = True
config.dump_field = 'upload_file_minidump'
config.stackwalk_command_line = (
'timeout -s KILL 30 $minidump_stackwalk_pathname '
'--raw-json $rawfilePathname $dumpfilePathname '
'$processor_symbols_pathname_list 2>/dev/null'
)
config.minidump_stackwalk_pathname = '/bin/stackwalker'
config.processor_symbols_pathname_list = (
'/mnt/socorro/symbols/symbols_ffx,'
'/mnt/socorro/symbols/symbols_sea,'
'/mnt/socorro/symbols/symbols_tbrd,'
'/mnt/socorro/symbols/symbols_sbrd,'
'/mnt/socorro/symbols/symbols_os'
)
config.symbol_cache_path = '/mnt/socorro/symbols'
return config
#--------------------------------------------------------------------------
def get_basic_processor_meta(self):
processor_meta = DotDict()
processor_meta.processor_notes = []
processor_meta.quit_check = lambda: False
return processor_meta
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_everything_we_hoped_for(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = DotDict()
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = (
mocked_subprocess_module.Popen.return_value
)
mocked_subprocess_handle.stdout.read.return_value = (
cannonical_stackwalker_output_str
)
mocked_subprocess_handle.wait.return_value = 0
rule = MyBreakpadStackwalkerRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
eq_(processed_crash.json_dump, cannonical_stackwalker_output)
eq_(processed_crash.mdsw_return_code, 0)
eq_(processed_crash.mdsw_status_string, "OK")
ok_(processed_crash.success)
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_stackwalker_fails(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = DotDict()
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = \
mocked_subprocess_module.Popen.return_value
mocked_subprocess_handle.stdout.read.return_value = '{}'
mocked_subprocess_handle.wait.return_value = 124
rule = MyBreakpadStackwalkerRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
eq_(processed_crash.json_dump, {})
eq_(processed_crash.mdsw_return_code, 124)
eq_(processed_crash.mdsw_status_string, "unknown error")
ok_(not processed_crash.success)
eq_(
processor_meta.processor_notes,
["MDSW terminated with SIGKILL due to timeout", ]
)
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_stackwalker_fails_2(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = DotDict()
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = (
mocked_subprocess_module.Popen.return_value
)
mocked_subprocess_handle.stdout.read.return_value = int
mocked_subprocess_handle.wait.return_value = -1
rule = MyBreakpadStackwalkerRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
eq_(processed_crash.json_dump, {})
eq_(processed_crash.mdsw_return_code, -1)
eq_(processed_crash.mdsw_status_string, "unknown error")
ok_(not processed_crash.success)
eq_(
processor_meta.processor_notes,
[
"MDSW output failed in json: Expected String or Unicode",
"MDSW failed on 'upload_file_minidump': unknown error"
]
)
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.os.unlink')
def test_temp_file_context(self, mocked_unlink):
config = self.get_basic_config()
rule = BreakpadStackwalkerRule(config)
with rule._temp_file_context('foo.TEMPORARY.txt'):
pass
mocked_unlink.assert_called_once_with('foo.TEMPORARY.txt')
mocked_unlink.reset_mock()
with rule._temp_file_context('foo.txt'):
pass
eq_(mocked_unlink.call_count, 0)
mocked_unlink.reset_mock()
try:
with rule._temp_file_context('foo.TEMPORARY.txt'):
raise KeyError('oops')
except KeyError:
pass
mocked_unlink.assert_called_once_with('foo.TEMPORARY.txt')
mocked_unlink.reset_mock()
try:
with rule._temp_file_context('foo.txt'):
raise KeyError('oops')
except KeyError:
pass
eq_(mocked_unlink.call_count, 0)
#==============================================================================
class TestCrashingThreadRule(TestCase):
#--------------------------------------------------------------------------
def get_basic_config(self):
config = CDotDict()
config.logger = Mock()
config.chatty = True
return config
#--------------------------------------------------------------------------
def get_basic_processor_meta(self):
processor_meta = DotDict()
processor_meta.processor_notes = []
processor_meta.quit_check = lambda: False
return processor_meta
#--------------------------------------------------------------------------
def test_everything_we_hoped_for(self):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {}
processed_crash = DotDict()
processed_crash.json_dump = copy.copy(cannonical_stackwalker_output)
processor_meta = self.get_basic_processor_meta()
rule = CrashingThreadRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
eq_(processed_crash.crashedThread, 0)
#--------------------------------------------------------------------------
def test_stuff_missing(self):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {}
processed_crash = DotDict()
processed_crash.json_dump = {}
processor_meta = self.get_basic_processor_meta()
rule = CrashingThreadRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
eq_(processed_crash.crashedThread, None)
eq_(
processor_meta.processor_notes,
['MDSW did not identify the crashing thread']
)
cannonical_external_output = {
"key": "value"
}
cannonical_external_output_str = ujson.dumps(cannonical_external_output)
#==============================================================================
class TestExternalProcessRule(TestCase):
#--------------------------------------------------------------------------
def get_basic_config(self):
config = CDotDict()
config.logger = Mock()
config.chatty = True
config.dump_field = 'upload_file_minidump'
config.command_line = (
'timeout -s KILL 30 {command_pathname} '
'{dump_file_pathname} '
'{processor_symbols_pathname_list} 2>/dev/null'
)
config.command_pathname = 'bogus_command'
config.processor_symbols_pathname_list = (
'/mnt/socorro/symbols/symbols_ffx,'
'/mnt/socorro/symbols/symbols_sea,'
'/mnt/socorro/symbols/symbols_tbrd,'
'/mnt/socorro/symbols/symbols_sbrd,'
'/mnt/socorro/symbols/symbols_os'
)
config.symbol_cache_path = '/mnt/socorro/symbols'
config.result_key = 'bogus_command_result'
config.return_code_key = 'bogus_command_return_code'
return config
#--------------------------------------------------------------------------
def get_basic_processor_meta(self):
processor_meta = DotDict()
processor_meta.processor_notes = []
processor_meta.quit_check = lambda: False
return processor_meta
#--------------------------------------------------------------------------
def test_dot_save(self):
d = {}
ExternalProcessRule.dot_save(d, 'x', 1)
ok_(d['x'], 1)
ExternalProcessRule.dot_save(d, 'z.y', 10)
ok_(d['z']['y'], 10)
d['a'] = {}
d['a']['b'] = {}
ExternalProcessRule.dot_save(d, 'a.b.c', 100)
ok_(d['a']['b']['c'], 100)
dd = CDotDict()
ExternalProcessRule.dot_save(dd, 'a.b.c.d.e.f', 1000)
ok_(dd.a.b.c.d.e.f, 1000)
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_everything_we_hoped_for(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = DotDict()
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = (
mocked_subprocess_module.Popen.return_value
)
mocked_subprocess_handle.stdout.read.return_value = (
cannonical_external_output_str
)
mocked_subprocess_handle.wait.return_value = 0
rule = ExternalProcessRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
mocked_subprocess_module.Popen.assert_called_with(
'timeout -s KILL 30 bogus_command a_fake_dump.dump '
'/mnt/socorro/symbols/symbols_ffx,/mnt/socorro/symbols/'
'symbols_sea,/mnt/socorro/symbols/symbols_tbrd,/mnt/socorro/'
'symbols/symbols_sbrd,/mnt/socorro/symbols/symbols_os'
' 2>/dev/null',
shell=True,
stdout=mocked_subprocess_module.PIPE
)
eq_(
processed_crash.bogus_command_result,
cannonical_external_output
)
eq_(processed_crash.bogus_command_return_code, 0)
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_external_fails(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = DotDict()
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = \
mocked_subprocess_module.Popen.return_value
mocked_subprocess_handle.stdout.read.return_value = '{}'
mocked_subprocess_handle.wait.return_value = 124
rule = ExternalProcessRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
eq_(processed_crash.bogus_command_result, {})
eq_(processed_crash.bogus_command_return_code, 124)
eq_(processor_meta.processor_notes, [])
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_external_fails_2(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = DotDict()
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = (
mocked_subprocess_module.Popen.return_value
)
mocked_subprocess_handle.stdout.read.return_value = int
mocked_subprocess_handle.wait.return_value = -1
rule = ExternalProcessRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
eq_(processed_crash.bogus_command_result, {})
eq_(processed_crash.bogus_command_return_code, -1)
eq_(
processor_meta.processor_notes,
[
'bogus_command output failed in '
'json: Expected String or Unicode',
]
)
#==============================================================================
class TestDumpLookupExternalRule(TestCase):
#--------------------------------------------------------------------------
def test_default_parameters(self):
eq_(
DumpLookupExternalRule.required_config.dump_field.default,
'upload_file_minidump'
)
ok_(
DumpLookupExternalRule.required_config.dump_field is not
ExternalProcessRule.required_config.dump_field
)
eq_(
DumpLookupExternalRule.required_config.command_pathname.default,
'/data/socorro/stackwalk/bin/dump-lookup'
)
ok_(
DumpLookupExternalRule.required_config.command_pathname is not
ExternalProcessRule.required_config.command_pathname
)
eq_(
DumpLookupExternalRule.required_config.result_key.default,
'dump_lookup'
)
ok_(
DumpLookupExternalRule.required_config.result_key is not
ExternalProcessRule.required_config.result_key
)
eq_(
DumpLookupExternalRule.required_config.return_code_key.default,
'dump_lookup_return_code'
)
ok_(
DumpLookupExternalRule.required_config.return_code_key is not
ExternalProcessRule.required_config.return_code_key
)
#==============================================================================
class TestBreakpadTransformRule2015(TestCase):
#--------------------------------------------------------------------------
def get_basic_config(self):
config = CDotDict()
config.logger = Mock()
config.chatty = True
config.dump_field = 'upload_file_minidump'
config.command_line = (
            BreakpadStackwalkerRule2015.required_config.command_line.default
)
config.command_pathname = '/bin/stackwalker'
config.public_symbols_url = 'https://localhost'
config.private_symbols_url = 'https://localhost'
config.symbol_cache_path = '/mnt/socorro/symbols'
config.temporary_file_system_storage_path = '/tmp'
return config
#--------------------------------------------------------------------------
def get_basic_processor_meta(self):
processor_meta = DotDict()
processor_meta.processor_notes = []
processor_meta.quit_check = lambda: False
return processor_meta
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_everything_we_hoped_for(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = DotDict()
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = (
mocked_subprocess_module.Popen.return_value
)
mocked_subprocess_handle.stdout.read.return_value = (
cannonical_stackwalker_output_str
)
mocked_subprocess_handle.wait.return_value = 0
rule = BreakpadStackwalkerRule2015(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
eq_(processed_crash.json_dump, cannonical_stackwalker_output)
eq_(processed_crash.mdsw_return_code, 0)
eq_(processed_crash.mdsw_status_string, "OK")
ok_(processed_crash.success)
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_stackwalker_fails(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = DotDict()
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = \
mocked_subprocess_module.Popen.return_value
mocked_subprocess_handle.stdout.read.return_value = '{}\n'
mocked_subprocess_handle.wait.return_value = 124
rule = BreakpadStackwalkerRule2015(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
eq_(processed_crash.json_dump, {})
eq_(processed_crash.mdsw_return_code, 124)
eq_(processed_crash.mdsw_status_string, "unknown error")
ok_(not processed_crash.success)
eq_(
processor_meta.processor_notes,
["MDSW terminated with SIGKILL due to timeout", ]
)
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_stackwalker_fails_2(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = DotDict()
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = (
mocked_subprocess_module.Popen.return_value
)
mocked_subprocess_handle.stdout.read.return_value = int
mocked_subprocess_handle.wait.return_value = -1
rule = BreakpadStackwalkerRule2015(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
eq_(processed_crash.json_dump, {})
eq_(processed_crash.mdsw_return_code, -1)
eq_(processed_crash.mdsw_status_string, "unknown error")
ok_(not processed_crash.success)
eq_(
processor_meta.processor_notes,
[
"{command_pathname} output failed in json: Expected String "
"or Unicode".format(
**config
),
"MDSW failed on 'timeout -s KILL 30 /bin/stackwalker "
"--raw-json /tmp/00000000-0000-0000-0000-000002140504."
"MainThread.TEMPORARY.json --symbols-url https://localhost "
"--symbols-url https://localhost "
"--symbols-cache /mnt/socorro/symbols a_fake_dump.dump "
"2>/dev/null': unknown error"
]
)
#==============================================================================
class TestJitCrashCategorizeRule(TestCase):
#--------------------------------------------------------------------------
def get_basic_config(self):
config = CDotDict()
config.logger = Mock()
config.chatty = True
config.dump_field = 'upload_file_minidump'
config.command_line = (
JitCrashCategorizeRule.required_config.command_line.default
)
config.result_key = 'classifications.jit.category'
config.return_code_key = 'classifications.jit.category_return_code'
config.command_pathname = \
'/data/socorro/stackwalk/bin/jit-crash-categorize'
config.temporary_file_system_storage_path = '/tmp'
config.threshold = 8
return config
#--------------------------------------------------------------------------
def get_basic_processor_meta(self):
processor_meta = DotDict()
processor_meta.processor_notes = []
processor_meta.quit_check = lambda: False
return processor_meta
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_everything_we_hoped_for(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = CDotDict()
processed_crash.product = 'Firefox'
processed_crash.os_name = 'Windows 386'
processed_crash.cpu_name = 'x86'
processed_crash.signature = 'EnterBaseline'
processed_crash['json_dump.crashing_thread.frames'] = [
DotDict({'not_module': 'not-a-module',}),
DotDict({'module': 'a-module',})
]
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = (
mocked_subprocess_module.Popen.return_value
)
mocked_subprocess_handle.stdout.read.return_value = (
'EXTRA-SPECIAL'
)
mocked_subprocess_handle.wait.return_value = 0
rule = JitCrashCategorizeRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
eq_(processor_meta.processor_notes, [])
eq_(processed_crash.classifications.jit.category, 'EXTRA-SPECIAL')
eq_(processed_crash.classifications.jit.category_return_code, 0)
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_everything_we_hoped_for_2(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = CDotDict()
processed_crash.product = 'Firefox'
processed_crash.os_name = 'Windows 386'
processed_crash.cpu_name = 'x86'
processed_crash.signature = 'EnterIon'
processed_crash['json_dump.crashing_thread.frames'] = [
DotDict({'not_module': 'not-a-module',}),
DotDict({'module': 'a-module',})
]
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = (
mocked_subprocess_module.Popen.return_value
)
mocked_subprocess_handle.stdout.read.return_value = (
'EXTRA-SPECIAL'
)
mocked_subprocess_handle.wait.return_value = 0
rule = JitCrashCategorizeRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
eq_(processor_meta.processor_notes, [])
eq_(processed_crash.classifications.jit.category, 'EXTRA-SPECIAL')
eq_(processed_crash.classifications.jit.category_return_code, 0)
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_subprocess_fail(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = CDotDict()
processed_crash.product = 'Firefox'
processed_crash.os_name = 'Windows 386'
processed_crash.cpu_name = 'x86'
processed_crash.signature = 'EnterBaseline'
processed_crash['json_dump.crashing_thread.frames'] = [
DotDict({'not_module': 'not-a-module',}),
DotDict({'module': 'a-module',})
]
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = (
mocked_subprocess_module.Popen.return_value
)
mocked_subprocess_handle.stdout.read.return_value = (
None
)
mocked_subprocess_handle.wait.return_value = -1
rule = JitCrashCategorizeRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
eq_(processor_meta.processor_notes, [])
ok_(processed_crash.classifications.jit.category is None)
eq_(processed_crash.classifications.jit.category_return_code, -1)
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_wrong_os(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = DotDict()
processed_crash.product = 'Firefox'
processed_crash.os_name = 'MS-DOS'
processed_crash.cpu_name = 'x86'
processed_crash.signature = 'EnterBaseline'
processed_crash['json_dump.crashing_thread.frames'] = [
DotDict({'not_module': 'not-a-module',}),
DotDict({'module': 'a-module',})
]
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = (
mocked_subprocess_module.Popen.return_value
)
mocked_subprocess_handle.stdout.read.return_value = (
'EXTRA-SPECIAL'
)
mocked_subprocess_handle.wait.return_value = 0
rule = JitCrashCategorizeRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
ok_('classifications.jit.category' not in processed_crash)
ok_('classifications.jit.category_return_code' not in processed_crash)
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_wrong_product(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = DotDict()
processed_crash.product = 'Firefrenzy'
processed_crash.os_name = 'Windows NT'
processed_crash.cpu_name = 'x86'
processed_crash.signature = 'EnterBaseline'
processed_crash['json_dump.crashing_thread.frames'] = [
DotDict({'not_module': 'not-a-module',}),
DotDict({'module': 'a-module',})
]
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = (
mocked_subprocess_module.Popen.return_value
)
mocked_subprocess_handle.stdout.read.return_value = (
'EXTRA-SPECIAL'
)
mocked_subprocess_handle.wait.return_value = 0
rule = JitCrashCategorizeRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
ok_('classifications.jit.category' not in processed_crash)
ok_('classifications.jit.category_return_code' not in processed_crash)
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_wrong_cpu(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = DotDict()
processed_crash.product = 'Firefox'
processed_crash.os_name = 'Windows NT'
processed_crash.cpu_name = 'VAX 750'
processed_crash.signature = 'EnterBaseline'
processed_crash['json_dump.crashing_thread.frames'] = [
DotDict({'not_module': 'not-a-module',}),
DotDict({'module': 'a-module',})
]
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = (
mocked_subprocess_module.Popen.return_value
)
mocked_subprocess_handle.stdout.read.return_value = (
'EXTRA-SPECIAL'
)
mocked_subprocess_handle.wait.return_value = 0
rule = JitCrashCategorizeRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
ok_('classifications.jit.category' not in processed_crash)
ok_('classifications.jit.category_return_code' not in processed_crash)
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_wrong_signature(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = DotDict()
processed_crash.product = 'Firefox'
processed_crash.os_name = 'Windows NT'
processed_crash.cpu_name = 'x86'
processed_crash.signature = 'this-is-not-a-JIT-signature'
processed_crash['json_dump.crashing_thread.frames'] = [
DotDict({'not_module': 'not-a-module',}),
DotDict({'module': 'a-module',})
]
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = (
mocked_subprocess_module.Popen.return_value
)
mocked_subprocess_handle.stdout.read.return_value = (
'EXTRA-SPECIAL'
)
mocked_subprocess_handle.wait.return_value = 0
rule = JitCrashCategorizeRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
ok_('classifications.jit.category' not in processed_crash)
ok_('classifications.jit.category_return_code' not in processed_crash)
#--------------------------------------------------------------------------
@patch('socorro.processor.breakpad_transform_rules.subprocess')
def test_module_on_stack_top(self, mocked_subprocess_module):
config = self.get_basic_config()
raw_crash = copy.copy(canonical_standard_raw_crash)
raw_dumps = {config.dump_field: 'a_fake_dump.dump'}
processed_crash = DotDict()
processed_crash.product = 'Firefox'
processed_crash.os_name = 'Windows NT'
processed_crash.cpu_name = 'x86'
processed_crash.signature = 'EnterBaseline'
processed_crash['json_dump.crashing_thread.frames'] = [
DotDict({'module': 'a-module',}),
DotDict({'not_module': 'not-a-module',}),
]
processor_meta = self.get_basic_processor_meta()
mocked_subprocess_handle = (
mocked_subprocess_module.Popen.return_value
)
mocked_subprocess_handle.stdout.read.return_value = (
'EXTRA-SPECIAL'
)
mocked_subprocess_handle.wait.return_value = 0
rule = JitCrashCategorizeRule(config)
# the call to be tested
rule.act(raw_crash, raw_dumps, processed_crash, processor_meta)
ok_('classifications.jit.category' not in processed_crash)
ok_('classifications.jit.category_return_code' not in processed_crash)
| mpl-2.0 |
Tuyki/TT_RNN | MNISTSeq.py | 1 | 14227 | __author__ = "Yinchong Yang"
__copyright__ = "Siemens AG, 2018"
__licencse__ = "MIT"
__version__ = "0.1"
"""
MIT License
Copyright (c) 2018 Siemens AG
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
"""
We first sample MNIST digits to form sequences of random lengths.
The sequence is labeled as one if it contains a zero, and is labeled zero otherwise.
This simulates a high dimensional sequence classification task, such as predicting therapy decision
and survival of patients based on their historical clinical event information.
We train plain LSTM and Tensor-Train LSTM for this task.
After the training, we apply Layer-wise Relevance Propagation to identify the digit(s) that
have influenced the classification.
Apparently, we would expect the LRP algorithm would assign high relevance value to the zero(s)
in the sequence.
These experiments turn out to be successful, which demonstrates that
i) the LSTM and TT-LSTM can indeed learn the mapping from a zero to the sequence class, and that
ii) both LSTMs have no problem in storing the zero pattern over a period of time, because the
classifier is deployed only at the last hidden state, and that
iii) the implementation of the LRP algorithm, complex as it is, is also correct, in that
the zeros are assigned high relevance scores.
Especially, the experiments with the plain LSTM serve as a simulation study supporting our submission of
“Yinchong Yang, Volker Tresp, Marius Wunderle, Peter A. Fasching,
Explaining Therapy Predictions with Layer-wise Relevance Propagation in Neural Networks, at IEEE ICHI 2018”.
The original LRP for LSTM from the repository:
https://github.com/ArrasL/LRP_for_LSTM
which we modified and adjusted for keras models.
Feel free to experiment with the hyper parameters and suggest other sequence classification tasks.
Have fun ;)
"""
import pickle
import sys
import numpy as np
from numpy import newaxis as na
import keras
from keras.layers.recurrent import Recurrent
from keras import backend as K
from keras.engine import InputSpec
from keras import activations
from keras import initializers
from keras import regularizers
from keras import constraints
from keras.engine.topology import Layer
from TTLayer import *
from TTRNN import TT_LSTM
def make_seq(n, x, y, maxlen=32, seed=123):
np.random.seed(seed)
lens = np.random.choice(range(2, maxlen), n)
seqs = np.zeros((n, maxlen, 28**2))
labels = np.zeros(n)
digits_label = np.zeros((n, maxlen), dtype='int32')-1
ids = np.zeros((n, maxlen), dtype='int64')-1
for i in range(n):
digits_inds = np.random.choice(range(x.shape[0]), lens[i])
ids[i, -lens[i]::] = digits_inds
seqs[i, -lens[i]::, :] = x[digits_inds]
digits_label[i, -lens[i]::] = y[digits_inds]
class_inds = y[digits_inds]
if True:
# option 1: is there any 0 in the sequence?
labels[i] = (0 in class_inds)
else:
# option 2: even number of 0 -> label=0, odd number of 0 -> label=1
labels[i] = len(np.where(class_inds == 0)[0]) % 2 == 1
return [seqs, labels, digits_label, ids]
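# Illustrative usage sketch (added for clarity; not part of the original
# script). It assumes the flattened MNIST arrays x_train/y_train that are
# loaded further below and the default maxlen of 32:
#
#     seqs, labels, digit_labels, ids = make_seq(n=5, x=x_train, y=y_train,
#                                                maxlen=32, seed=0)
#     # seqs.shape == (5, 32, 784), zero-padded at the front of each sequence
#     # labels[i] == 1.0 iff a digit "0" occurs anywhere in sequence i
#     # digit_labels[i, t] == -1 for padding steps, else the digit class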
# From: https://github.com/ArrasL/LRP_for_LSTM
def lrp_linear(hin, w, b, hout, Rout, bias_nb_units, eps, bias_factor, debug=False):
"""
LRP for a linear layer with input dim D and output dim M.
Args:
- hin: forward pass input, of shape (D,)
- w: connection weights, of shape (D, M)
- b: biases, of shape (M,)
- hout: forward pass output, of shape (M,) (unequal to np.dot(w.T,hin)+b if more than one incoming layer!)
- Rout: relevance at layer output, of shape (M,)
- bias_nb_units: number of lower-layer units onto which the bias/stabilizer contribution is redistributed
- eps: stabilizer (small positive number)
- bias_factor: for global relevance conservation set to 1.0, otherwise 0.0 to ignore bias redistribution
Returns:
- Rin: relevance at layer input, of shape (D,)
"""
sign_out = np.where(hout[na, :] >= 0, 1., -1.) # shape (1, M)
numer = (w * hin[:, na]) + \
((bias_factor * b[na, :] * 1. + eps * sign_out * 1.) * 1. / bias_nb_units) # shape (D, M)
denom = hout[na, :] + (eps * sign_out * 1.) # shape (1, M)
message = (numer / denom) * Rout[na, :] # shape (D, M)
Rin = message.sum(axis=1) # shape (D,)
# Note: local layer relevance conservation if bias_factor==1.0 and bias_nb_units==D
# global network relevance conservation if bias_factor==1.0 (can be used for sanity check)
if debug:
print("local diff: ", Rout.sum() - Rin.sum())
return Rin
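# Hedged sanity check for the epsilon-LRP rule above (added; not part of the
# original). With bias_factor=1.0 and bias_nb_units=D the rule conserves
# relevance exactly, i.e. Rin.sum() equals Rout.sum() up to the eps stabilizer:
#
#     hin = np.array([1.0, 2.0]); w = np.array([[0.5, -1.0], [0.25, 0.5]])
#     b = np.array([0.1, -0.1]); hout = hin.dot(w) + b
#     Rout = np.array([0.7, 0.3])
#     Rin = lrp_linear(hin, w, b, hout, Rout, bias_nb_units=2, eps=1e-6,
#                      bias_factor=1.0)
#     # abs(Rin.sum() - Rout.sum()) is then ~0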
def sigmoid(x):
x = x.astype('float128')
return 1. / (1. + np.exp(-x))
# Modified from https://github.com/ArrasL/LRP_for_LSTM
def lstm_lrp(l, d, train_data = True):
if train_data:
x_l = X_tr[l]
y_l = Y_tr[l]
z_l = Z_tr[l]
# d_l = d_tr[l]
else:
x_l = X_te[l]
y_l = Y_te[l]
z_l = Z_te[l]
# d_l = d_te[l]
# calculate the FF pass in LSTM for every time step
pre_gates = np.zeros((MAXLEN, d*4))
gates = np.zeros((MAXLEN, d * 4))
h = np.zeros((MAXLEN, d))
c = np.zeros((MAXLEN, d))
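    # (Added commentary) The loop below re-implements the Keras LSTM forward
    # pass with the pre-activations packed as z = [z_i | z_f | z_g | z_o]:
    #     i_t = sigmoid(z_i), f_t = sigmoid(z_f), o_t = sigmoid(z_o)
    #     c_t = f_t * c_{t-1} + i_t * tanh(z_g)
    #     h_t = o_t * tanh(c_t)
    # with z = x_t.dot(Ws) + h_{t-1}.dot(Us) + b, so that pre_gates and gates
    # can be reused by the LRP backward pass further down.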
for t in range(MAXLEN):
z = np.dot(x_l[t], Ws)
if t > 0:
z += np.dot(h[t-1], Us)
z += b
pre_gates[t] = z
z0 = z[0:d]
z1 = z[d:2*d]
z2 = z[2*d:3*d]
z3 = z[3 * d::]
i = sigmoid(z0)
f = sigmoid(z1)
c[t] = f * c[t-1] + i * np.tanh(z2)
o = sigmoid(z3)
h[t] = o * np.tanh(c[t])
gates[t] = np.concatenate([i, f, np.tanh(z2), o])
# check: z_l[12] / h[-1][12]
Rh = np.zeros((MAXLEN, d))
Rc = np.zeros((MAXLEN, d))
Rg = np.zeros((MAXLEN, d))
Rx = np.zeros((MAXLEN, 28**2))
bias_factor = 0
Rh[MAXLEN-1] = lrp_linear(hin=z_l,
w=Dense_w,
b=np.array(Dense_b),
hout=np.dot(z_l, Dense_w)+Dense_b,
Rout=np.array([y_l]),
bias_nb_units=len(z_l),
eps=eps,
bias_factor=bias_factor)
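    # (Added commentary) Backward relevance pass in the style of the
    # LRP_for_LSTM reference cited above: relevance flows from h_t into c_t,
    # from c_t into c_{t-1} (through the forget-gate term) and into the
    # candidate contribution i_t * g_t, and from there back to x_t and
    # h_{t-1} using only the candidate-gate columns (2d:3d) of Ws and Us.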
for t in reversed(range(MAXLEN)):
# t = MAXLEN-1
# print t
Rc[t] += Rh[t]
# Rc[t] = Rh[t]
if t > 0:
Rc[t-1] = lrp_linear(gates[t, d: 2 * d] * c[t - 1], # gates[t , 2 *d: 3 *d ] *c[ t -1],
np.identity(d),
np.zeros((d)),
c[t],
Rc[t],
2*d,
eps,
bias_factor,
debug=False)
Rg[t] = lrp_linear(gates[t, 0:d] * gates[t, 2*d:3*d], # h_input: i + g
np.identity(d), # W
np.zeros((d)), # b
c[t], # h_output
Rc[t], # R_output
2 * d,
eps,
bias_factor,
debug=False)
# foo = np.dot(x_l[t], Ws[:,2*d:3*d]) + np.dot(h[t-1], Us[:, 2*d:3*d]) + b[2*d:3*d]
Rx[t] = lrp_linear(x_l[t],
Ws[:,2*d:3*d],
b[2*d:3*d],
pre_gates[t, 2*d:3*d],
Rg[t],
d + 28 ** 2,
eps,
bias_factor,
debug=False)
if t > 0:
Rh[t-1] = lrp_linear(h[t-1],
Us[:,2*d:3*d],
b[2*d:3*d],
pre_gates[t, 2 * d:3 * d],
Rg[t],
d + 28**2,
eps,
bias_factor,
debug=False)
# hin, w, b, hout, Rout, bias_nb_units, eps, bias_factor, debug=False
# Rx[np.where(d_l==-1.)[0]] *= 0
return Rx
from keras.datasets import mnist
from keras.utils import to_categorical
from keras.models import Model, Input
from keras.layers import Dense, GRU, LSTM, Dropout, Masking
from keras.optimizers import *
from keras.regularizers import l2
from sklearn.metrics import *
# Script configurations ###################################################################
seed=111111
use_TT = True  # whether to use Tensor-Train or plain RNNs
# Prepare the data ########################################################################
# Load the MNIST data and build sequences:
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(x_train.shape[0], -1)
x_test = x_test.reshape(x_test.shape[0], -1)
MAXLEN = 32 # max length of the sequences
X_tr, Y_tr, d_tr, idx_tr = make_seq(n=10000, x=x_train, y=y_train, maxlen=MAXLEN, seed=seed)
X_te, Y_te, d_te, idx_te = make_seq(n=1000, x=x_test, y=y_test, maxlen=MAXLEN, seed=seed+1)
# Define the model ######################################################################
if use_TT:
# TT settings
tt_input_shape = [7, 7, 16]
tt_output_shape = [4, 4, 4]
tt_ranks = [1, 4, 4, 1]
rnn_size = 64
X = Input(shape=X_tr.shape[1::])
X_mask = Masking(mask_value=0.0, input_shape=X_tr.shape[1::])(X)
if use_TT:
Z = TT_LSTM(tt_input_shape=tt_input_shape, tt_output_shape=tt_output_shape, tt_ranks=tt_ranks,
return_sequences=False, recurrent_dropout=.5)(X_mask)
Out = Dense(units=1, activation='sigmoid', kernel_regularizer=l2(1e-2))(Z)
else:
Z = LSTM(units=rnn_size, return_sequences=False, recurrent_dropout=.5)(X_mask) # dropout=.5,
Out = Dense(units=1, activation='sigmoid', kernel_regularizer=l2(1e-2))(Z)
rnn_model = Model(X, Out)
rnn_model.compile(optimizer=Adam(1e-3), loss='binary_crossentropy',
metrics=['accuracy'])
# Train the model and save the results ######################################################
rnn_model.fit(X_tr, Y_tr, epochs=50, batch_size=32, validation_split=.2, verbose=2)
Y_hat = rnn_model.predict(X_tr, verbose=2).reshape(-1)
train_acc = (np.round(Y_hat) == Y_tr).mean()
Y_pred = rnn_model.predict(X_te, verbose=2).reshape(-1)
(np.round(Y_pred) == Y_te).mean()
pred_acc = (np.round(Y_pred) == Y_te).mean()
# Collect all hidden layers ################################################################
if use_TT:
# Reconstruct the fully connected input-to-hidden weights:
from keras.initializers import constant
_tt_output_shape = np.copy(tt_output_shape)
_tt_output_shape[0] *= 4
fc_w = rnn_model.get_weights()[0]
fc_layer = TT_Layer(tt_input_shape=tt_input_shape, tt_output_shape=_tt_output_shape, tt_ranks=tt_ranks,
kernel_initializer=constant(value=fc_w), use_bias=False)
fc_input = Input(shape=(X_tr.shape[2],))
fc_output = fc_layer(fc_input)
fc_model = Model(fc_input, fc_output)
fc_model.compile('sgd', 'mse')
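    # (Added commentary) Feeding the identity matrix through the TT layer
    # evaluates it on every unit basis vector, which materializes the full
    # input-to-hidden weight matrix that the Tensor-Train cores represent
    # implicitly; it can then be handed to a plain LSTM for LRP.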
fc_recon_mat = fc_model.predict(np.identity(X_tr.shape[2]))
# Reconstruct the entire LSTM:
fc_Z = LSTM(units=np.prod(tt_output_shape), return_sequences=False, dropout=.5, recurrent_dropout=.5,
weights=[fc_recon_mat, rnn_model.get_weights()[2], rnn_model.get_weights()[1]])(X_mask)
else:
fc_Z = LSTM(units=rnn_size, return_sequences=False, dropout=.5, recurrent_dropout=.5,
weights=rnn_model.get_weights()[0:3])(X_mask)
fc_Out = Dense(units=1, activation='sigmoid', kernel_regularizer=l2(1e-3),
weights=rnn_model.get_weights()[3::])(fc_Z)
fc_rnn_model = Model(X, fc_Out)
fc_rnn_model.compile(optimizer=Adam(1e-3), loss='binary_crossentropy',
metrics=['accuracy'])
fc_rnn_model.evaluate(X_te, Y_te, verbose=2)
# Calculate the LRP: #########################################################################
fc_Z_model = Model(X, fc_Z)
fc_Z_model.compile('sgd', 'mse')
Y_hat_fc = fc_rnn_model.predict(X_tr)
Y_pred_fc = fc_rnn_model.predict(X_te)
Ws = fc_rnn_model.get_weights()[0]
Us = fc_rnn_model.get_weights()[1]
b = fc_rnn_model.get_weights()[2]
Dense_w = fc_rnn_model.get_weights()[3]
Dense_b = fc_rnn_model.get_weights()[4]
Z_tr = fc_Z_model.predict(X_tr)
Z_te = fc_Z_model.predict(X_te)
eps = 1e-4
is_number_flag = np.where(d_te != -1)
# All relevance scores of the test sequences
lrp_te = np.vstack([lstm_lrp(i, rnn_size, False).sum(1) for i in range(X_te.shape[0])])
lrp_auroc = roc_auc_score((d_te == 0).astype('int')[is_number_flag].reshape(-1),
lrp_te[is_number_flag].reshape(-1))
lrp_auprc = average_precision_score((d_te == 0).astype('int')[is_number_flag].reshape(-1),
lrp_te[is_number_flag].reshape(-1))
# The reported results:
print(pred_acc)
print(lrp_auroc)
print(lrp_auprc)
| mit |
gohin/django | django/db/migrations/state.py | 138 | 24339 | from __future__ import unicode_literals
import copy
from collections import OrderedDict
from contextlib import contextmanager
from django.apps import AppConfig
from django.apps.registry import Apps, apps as global_apps
from django.conf import settings
from django.db import models
from django.db.models.fields.proxy import OrderWrt
from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT
from django.db.models.options import DEFAULT_NAMES, normalize_together
from django.db.models.utils import make_model_tuple
from django.utils import six
from django.utils.encoding import force_text, smart_text
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
from django.utils.version import get_docs_version
from .exceptions import InvalidBasesError
def _get_app_label_and_model_name(model, app_label=''):
if isinstance(model, six.string_types):
split = model.split('.', 1)
return (tuple(split) if len(split) == 2 else (app_label, split[0]))
else:
return model._meta.app_label, model._meta.model_name
def get_related_models_recursive(model):
"""
Returns all models that have a direct or indirect relationship
to the given model.
Relationships are either defined by explicit relational fields, like
ForeignKey, ManyToManyField or OneToOneField, or by inheriting from another
model (a superclass is related to its subclasses, but not vice versa). Note,
however, that a model inheriting from a concrete model is also related to
its superclass through the implicit *_ptr OneToOneField on the subclass.
"""
def _related_models(m):
return [
f.related_model for f in m._meta.get_fields(include_parents=True, include_hidden=True)
if f.is_relation and f.related_model is not None and not isinstance(f.related_model, six.string_types)
] + [
subclass for subclass in m.__subclasses__()
if issubclass(subclass, models.Model)
]
seen = set()
queue = _related_models(model)
for rel_mod in queue:
rel_app_label, rel_model_name = rel_mod._meta.app_label, rel_mod._meta.model_name
if (rel_app_label, rel_model_name) in seen:
continue
seen.add((rel_app_label, rel_model_name))
queue.extend(_related_models(rel_mod))
return seen - {(model._meta.app_label, model._meta.model_name)}
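# A minimal illustration (added; app and model names are made up): given
#   class Author(models.Model): ...
#   class Book(models.Model): author = models.ForeignKey(Author)
# get_related_models_recursive(Author) would include ("library", "book") and
# get_related_models_recursive(Book) would include ("library", "author"),
# because reverse accessors and forward relational fields both count, while
# the queried model itself is excluded from the result.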
class ProjectState(object):
"""
Represents the entire project's overall state.
This is the item that is passed around - we do it here rather than at the
app level so that cross-app FKs/etc. resolve properly.
"""
def __init__(self, models=None, real_apps=None):
self.models = models or {}
# Apps to include from main registry, usually unmigrated ones
self.real_apps = real_apps or []
def add_model(self, model_state):
app_label, model_name = model_state.app_label, model_state.name_lower
self.models[(app_label, model_name)] = model_state
if 'apps' in self.__dict__: # hasattr would cache the property
self.reload_model(app_label, model_name)
def remove_model(self, app_label, model_name):
del self.models[app_label, model_name]
if 'apps' in self.__dict__: # hasattr would cache the property
self.apps.unregister_model(app_label, model_name)
# Need to do this explicitly since unregister_model() doesn't clear
# the cache automatically (#24513)
self.apps.clear_cache()
def reload_model(self, app_label, model_name):
if 'apps' in self.__dict__: # hasattr would cache the property
try:
old_model = self.apps.get_model(app_label, model_name)
except LookupError:
related_models = set()
else:
# Get all relations to and from the old model before reloading,
# as _meta.apps may change
related_models = get_related_models_recursive(old_model)
# Get all outgoing references from the model to be rendered
model_state = self.models[(app_label, model_name)]
# Directly related models are the models pointed to by ForeignKeys,
# OneToOneFields, and ManyToManyFields.
direct_related_models = set()
for name, field in model_state.fields:
if field.is_relation:
if field.remote_field.model == RECURSIVE_RELATIONSHIP_CONSTANT:
continue
rel_app_label, rel_model_name = _get_app_label_and_model_name(field.related_model, app_label)
direct_related_models.add((rel_app_label, rel_model_name.lower()))
# For all direct related models recursively get all related models.
related_models.update(direct_related_models)
for rel_app_label, rel_model_name in direct_related_models:
try:
rel_model = self.apps.get_model(rel_app_label, rel_model_name)
except LookupError:
pass
else:
related_models.update(get_related_models_recursive(rel_model))
# Include the model itself
related_models.add((app_label, model_name))
# Unregister all related models
with self.apps.bulk_update():
for rel_app_label, rel_model_name in related_models:
self.apps.unregister_model(rel_app_label, rel_model_name)
states_to_be_rendered = []
# Gather all models states of those models that will be rerendered.
# This includes:
# 1. All related models of unmigrated apps
for model_state in self.apps.real_models:
if (model_state.app_label, model_state.name_lower) in related_models:
states_to_be_rendered.append(model_state)
# 2. All related models of migrated apps
for rel_app_label, rel_model_name in related_models:
try:
model_state = self.models[rel_app_label, rel_model_name]
except KeyError:
pass
else:
states_to_be_rendered.append(model_state)
# Render all models
self.apps.render_multiple(states_to_be_rendered)
def clone(self):
"Returns an exact copy of this ProjectState"
new_state = ProjectState(
models={k: v.clone() for k, v in self.models.items()},
real_apps=self.real_apps,
)
if 'apps' in self.__dict__:
new_state.apps = self.apps.clone()
return new_state
@cached_property
def apps(self):
return StateApps(self.real_apps, self.models)
@property
def concrete_apps(self):
self.apps = StateApps(self.real_apps, self.models, ignore_swappable=True)
return self.apps
@classmethod
def from_apps(cls, apps):
"Takes in an Apps and returns a ProjectState matching it"
app_models = {}
for model in apps.get_models(include_swapped=True):
model_state = ModelState.from_model(model)
app_models[(model_state.app_label, model_state.name_lower)] = model_state
return cls(app_models)
def __eq__(self, other):
if set(self.models.keys()) != set(other.models.keys()):
return False
if set(self.real_apps) != set(other.real_apps):
return False
return all(model == other.models[key] for key, model in self.models.items())
def __ne__(self, other):
return not (self == other)
class AppConfigStub(AppConfig):
"""
Stubs a Django AppConfig. Only provides a label, and a dict of models.
"""
# Not used, but required by AppConfig.__init__
path = ''
def __init__(self, label):
self.label = label
# App-label and app-name are not the same thing, so technically passing
# in the label here is wrong. In practice, migrations don't care about
# the app name, but we need something unique, and the label works fine.
super(AppConfigStub, self).__init__(label, None)
def import_models(self, all_models):
self.models = all_models
class StateApps(Apps):
"""
Subclass of the global Apps registry class to better handle dynamic model
additions and removals.
"""
def __init__(self, real_apps, models, ignore_swappable=False):
# Any apps in self.real_apps should have all their models included
# in the render. We don't use the original model instances as there
# are some variables that refer to the Apps object.
# FKs/M2Ms from real apps are also not included as they just
# mess things up with partial states (due to lack of dependencies)
self.real_models = []
for app_label in real_apps:
app = global_apps.get_app_config(app_label)
for model in app.get_models():
self.real_models.append(ModelState.from_model(model, exclude_rels=True))
# Populate the app registry with a stub for each application.
app_labels = {model_state.app_label for model_state in models.values()}
app_configs = [AppConfigStub(label) for label in sorted(real_apps + list(app_labels))]
super(StateApps, self).__init__(app_configs)
self.render_multiple(list(models.values()) + self.real_models)
# There shouldn't be any operations pending at this point.
pending_models = set(self._pending_operations)
if ignore_swappable:
pending_models -= {make_model_tuple(settings.AUTH_USER_MODEL)}
if pending_models:
msg = "Unhandled pending operations for models: %s"
labels = (".".join(model_key) for model_key in self._pending_operations)
raise ValueError(msg % ", ".join(labels))
@contextmanager
def bulk_update(self):
# Avoid clearing each model's cache for each change. Instead, clear
# all caches when we're finished updating the model instances.
ready = self.ready
self.ready = False
try:
yield
finally:
self.ready = ready
self.clear_cache()
def render_multiple(self, model_states):
# We keep trying to render the models in a loop, ignoring invalid
# base errors, until the size of the unrendered models doesn't
# decrease by at least one, meaning there's a base dependency loop/
# missing base.
if not model_states:
return
# Prevent that all model caches are expired for each render.
with self.bulk_update():
unrendered_models = model_states
while unrendered_models:
new_unrendered_models = []
for model in unrendered_models:
try:
model.render(self)
except InvalidBasesError:
new_unrendered_models.append(model)
if len(new_unrendered_models) == len(unrendered_models):
raise InvalidBasesError(
"Cannot resolve bases for %r\nThis can happen if you are inheriting models from an "
"app with migrations (e.g. contrib.auth)\n in an app with no migrations; see "
"https://docs.djangoproject.com/en/%s/topics/migrations/#dependencies "
"for more" % (new_unrendered_models, get_docs_version())
)
unrendered_models = new_unrendered_models
def clone(self):
"""
Return a clone of this registry, mainly used by the migration framework.
"""
clone = StateApps([], {})
clone.all_models = copy.deepcopy(self.all_models)
clone.app_configs = copy.deepcopy(self.app_configs)
# No need to actually clone them, they'll never change
clone.real_models = self.real_models
return clone
def register_model(self, app_label, model):
self.all_models[app_label][model._meta.model_name] = model
if app_label not in self.app_configs:
self.app_configs[app_label] = AppConfigStub(app_label)
self.app_configs[app_label].models = OrderedDict()
self.app_configs[app_label].models[model._meta.model_name] = model
self.do_pending_operations(model)
self.clear_cache()
def unregister_model(self, app_label, model_name):
try:
del self.all_models[app_label][model_name]
del self.app_configs[app_label].models[model_name]
except KeyError:
pass
class ModelState(object):
"""
Represents a Django Model. We don't use the actual Model class
as it's not designed to have its options changed - instead, we
mutate this one and then render it into a Model as required.
Note that while you are allowed to mutate .fields, you are not allowed
to mutate the Field instances inside there themselves - you must instead
assign new ones, as these are not detached during a clone.
"""
def __init__(self, app_label, name, fields, options=None, bases=None, managers=None):
self.app_label = app_label
self.name = force_text(name)
self.fields = fields
self.options = options or {}
self.bases = bases or (models.Model, )
self.managers = managers or []
# Sanity-check that fields is NOT a dict. It must be ordered.
if isinstance(self.fields, dict):
raise ValueError("ModelState.fields cannot be a dict - it must be a list of 2-tuples.")
for name, field in fields:
# Sanity-check that fields are NOT already bound to a model.
if hasattr(field, 'model'):
raise ValueError(
'ModelState.fields cannot be bound to a model - "%s" is.' % name
)
# Sanity-check that relation fields are NOT referring to a model class.
if field.is_relation and hasattr(field.related_model, '_meta'):
raise ValueError(
'ModelState.fields cannot refer to a model class - "%s.to" does. '
'Use a string reference instead.' % name
)
if field.many_to_many and hasattr(field.remote_field.through, '_meta'):
raise ValueError(
'ModelState.fields cannot refer to a model class - "%s.through" does. '
'Use a string reference instead.' % name
)
@cached_property
def name_lower(self):
return self.name.lower()
@classmethod
def from_model(cls, model, exclude_rels=False):
"""
Feed me a model, get a ModelState representing it out.
"""
# Deconstruct the fields
fields = []
for field in model._meta.local_fields:
if getattr(field, "remote_field", None) and exclude_rels:
continue
if isinstance(field, OrderWrt):
continue
name = force_text(field.name, strings_only=True)
try:
fields.append((name, field.clone()))
except TypeError as e:
raise TypeError("Couldn't reconstruct field %s on %s: %s" % (
name,
model._meta.label,
e,
))
if not exclude_rels:
for field in model._meta.local_many_to_many:
name = force_text(field.name, strings_only=True)
try:
fields.append((name, field.clone()))
except TypeError as e:
raise TypeError("Couldn't reconstruct m2m field %s on %s: %s" % (
name,
model._meta.object_name,
e,
))
# Extract the options
options = {}
for name in DEFAULT_NAMES:
# Ignore some special options
if name in ["apps", "app_label"]:
continue
elif name in model._meta.original_attrs:
if name == "unique_together":
ut = model._meta.original_attrs["unique_together"]
options[name] = set(normalize_together(ut))
elif name == "index_together":
it = model._meta.original_attrs["index_together"]
options[name] = set(normalize_together(it))
else:
options[name] = model._meta.original_attrs[name]
# Force-convert all options to text_type (#23226)
options = cls.force_text_recursive(options)
# If we're ignoring relationships, remove all field-listing model
# options (that option basically just means "make a stub model")
if exclude_rels:
for key in ["unique_together", "index_together", "order_with_respect_to"]:
if key in options:
del options[key]
def flatten_bases(model):
bases = []
for base in model.__bases__:
if hasattr(base, "_meta") and base._meta.abstract:
bases.extend(flatten_bases(base))
else:
bases.append(base)
return bases
# We can't rely on __mro__ directly because we only want to flatten
# abstract models and not the whole tree. However by recursing on
# __bases__ we may end up with duplicates and ordering issues, we
# therefore discard any duplicates and reorder the bases according
# to their index in the MRO.
flattened_bases = sorted(set(flatten_bases(model)), key=lambda x: model.__mro__.index(x))
# Make our record
bases = tuple(
(
base._meta.label_lower
if hasattr(base, "_meta") else
base
)
for base in flattened_bases
)
# Ensure at least one base inherits from models.Model
if not any((isinstance(base, six.string_types) or issubclass(base, models.Model)) for base in bases):
bases = (models.Model,)
# Constructs all managers on the model
managers_mapping = {}
def reconstruct_manager(mgr):
as_manager, manager_path, qs_path, args, kwargs = mgr.deconstruct()
if as_manager:
qs_class = import_string(qs_path)
instance = qs_class.as_manager()
else:
manager_class = import_string(manager_path)
instance = manager_class(*args, **kwargs)
# We rely on the ordering of the creation_counter of the original
# instance
name = force_text(mgr.name)
managers_mapping[name] = (mgr.creation_counter, instance)
if hasattr(model, "_default_manager"):
default_manager_name = force_text(model._default_manager.name)
# Make sure the default manager is always the first
if model._default_manager.use_in_migrations:
reconstruct_manager(model._default_manager)
else:
# Force this manager to be the first and thus default
managers_mapping[default_manager_name] = (0, models.Manager())
# Sort all managers by their creation counter
for _, manager, _ in sorted(model._meta.managers):
if manager.name == "_base_manager" or not manager.use_in_migrations:
continue
reconstruct_manager(manager)
# Sort all managers by their creation counter but take only name and
# instance for further processing
managers = [
(name, instance) for name, (cc, instance) in
sorted(managers_mapping.items(), key=lambda v: v[1])
]
# If the only manager on the model is the default manager defined
# by Django (`objects = models.Manager()`), this manager will not
# be added to the model state.
if managers == [('objects', models.Manager())]:
managers = []
else:
managers = []
# Construct the new ModelState
return cls(
model._meta.app_label,
model._meta.object_name,
fields,
options,
bases,
managers,
)
@classmethod
def force_text_recursive(cls, value):
if isinstance(value, six.string_types):
return smart_text(value)
elif isinstance(value, list):
return [cls.force_text_recursive(x) for x in value]
elif isinstance(value, tuple):
return tuple(cls.force_text_recursive(x) for x in value)
elif isinstance(value, set):
return set(cls.force_text_recursive(x) for x in value)
elif isinstance(value, dict):
return {
cls.force_text_recursive(k): cls.force_text_recursive(v)
for k, v in value.items()
}
return value
def construct_managers(self):
"Deep-clone the managers using deconstruction"
# Sort all managers by their creation counter
sorted_managers = sorted(self.managers, key=lambda v: v[1].creation_counter)
for mgr_name, manager in sorted_managers:
mgr_name = force_text(mgr_name)
as_manager, manager_path, qs_path, args, kwargs = manager.deconstruct()
if as_manager:
qs_class = import_string(qs_path)
yield mgr_name, qs_class.as_manager()
else:
manager_class = import_string(manager_path)
yield mgr_name, manager_class(*args, **kwargs)
def clone(self):
"Returns an exact copy of this ModelState"
return self.__class__(
app_label=self.app_label,
name=self.name,
fields=list(self.fields),
options=dict(self.options),
bases=self.bases,
managers=list(self.managers),
)
def render(self, apps):
"Creates a Model object from our current state into the given apps"
# First, make a Meta object
meta_contents = {'app_label': self.app_label, "apps": apps}
meta_contents.update(self.options)
meta = type(str("Meta"), tuple(), meta_contents)
# Then, work out our bases
try:
bases = tuple(
(apps.get_model(base) if isinstance(base, six.string_types) else base)
for base in self.bases
)
except LookupError:
raise InvalidBasesError("Cannot resolve one or more bases from %r" % (self.bases,))
# Turn fields into a dict for the body, add other bits
body = {name: field.clone() for name, field in self.fields}
body['Meta'] = meta
body['__module__'] = "__fake__"
# Restore managers
body.update(self.construct_managers())
# Then, make a Model object (apps.register_model is called in __new__)
return type(
str(self.name),
bases,
body,
)
def get_field_by_name(self, name):
for fname, field in self.fields:
if fname == name:
return field
raise ValueError("No field called %s on model %s" % (name, self.name))
def __repr__(self):
return "<ModelState: '%s.%s'>" % (self.app_label, self.name)
def __eq__(self, other):
return (
(self.app_label == other.app_label) and
(self.name == other.name) and
(len(self.fields) == len(other.fields)) and
all((k1 == k2 and (f1.deconstruct()[1:] == f2.deconstruct()[1:]))
for (k1, f1), (k2, f2) in zip(self.fields, other.fields)) and
(self.options == other.options) and
(self.bases == other.bases) and
(self.managers == other.managers)
)
def __ne__(self, other):
return not (self == other)
| bsd-3-clause |
pombredanne/MOG | nova/api/openstack/compute/contrib/fixed_ips.py | 12 | 3544 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob.exc
from nova.api.openstack import extensions
from nova import db
from nova import exception
from nova.openstack.common.gettextutils import _
authorize = extensions.extension_authorizer('compute', 'fixed_ips')
class FixedIPController(object):
def show(self, req, id):
"""Return data about the given fixed ip."""
context = req.environ['nova.context']
authorize(context)
try:
fixed_ip = db.fixed_ip_get_by_address_detailed(context, id)
except (exception.FixedIpNotFoundForAddress,
exception.FixedIpInvalid) as ex:
raise webob.exc.HTTPNotFound(explanation=ex.format_message())
fixed_ip_info = {"fixed_ip": {}}
if fixed_ip[1] is None:
msg = _("Fixed IP %s has been deleted") % id
raise webob.exc.HTTPNotFound(explanation=msg)
fixed_ip_info['fixed_ip']['cidr'] = fixed_ip[1]['cidr']
fixed_ip_info['fixed_ip']['address'] = fixed_ip[0]['address']
if fixed_ip[2]:
fixed_ip_info['fixed_ip']['hostname'] = fixed_ip[2]['hostname']
fixed_ip_info['fixed_ip']['host'] = fixed_ip[2]['host']
else:
fixed_ip_info['fixed_ip']['hostname'] = None
fixed_ip_info['fixed_ip']['host'] = None
return fixed_ip_info
def action(self, req, id, body):
context = req.environ['nova.context']
authorize(context)
if 'reserve' in body:
return self._set_reserved(context, id, True)
elif 'unreserve' in body:
return self._set_reserved(context, id, False)
else:
raise webob.exc.HTTPBadRequest(
explanation="No valid action specified")
def _set_reserved(self, context, address, reserved):
try:
fixed_ip = db.fixed_ip_get_by_address(context, address)
db.fixed_ip_update(context, fixed_ip['address'],
{'reserved': reserved})
except (exception.FixedIpNotFoundForAddress, exception.FixedIpInvalid):
msg = _("Fixed IP %s not found") % address
raise webob.exc.HTTPNotFound(explanation=msg)
return webob.exc.HTTPAccepted()
class Fixed_ips(extensions.ExtensionDescriptor):
"""Fixed IPs support."""
name = "FixedIPs"
alias = "os-fixed-ips"
namespace = "http://docs.openstack.org/compute/ext/fixed_ips/api/v2"
updated = "2012-10-18T13:25:27-06:00"
def __init__(self, ext_mgr):
ext_mgr.register(self)
def get_resources(self):
member_actions = {'action': 'POST'}
resources = []
resource = extensions.ResourceExtension('os-fixed-ips',
FixedIPController(),
member_actions=member_actions)
resources.append(resource)
return resources
| apache-2.0 |
gwpy/vet | gwvet/_version.py | 1 | 16743 |
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.16 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
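# Illustrative note (added): with this configuration, get_versions() below
# returns a dict shaped like
#   {"version": "0.1+3.g1a2b3c4", "full-revisionid": "<40-char sha>",
#    "dirty": False, "error": None}
# trying, in order, expanded git-archive keywords, "git describe" on a git
# checkout, and the parent-directory name (only when a prefix is configured),
# before falling back to "0+unknown".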
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
keywords = {"refnames": git_refnames, "full": git_full}
return keywords
class VersioneerConfig(object):
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = ""
cfg.parentdir_prefix = ""
cfg.versionfile_source = "gwvet/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
return None
return stdout
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes
both the project name and a version string.
"""
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
print("guessing rootdir is '%s', but '%s' doesn't start with "
"prefix '%s'" % (root, dirname, parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None}
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs-tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None
}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags"}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %s" % root)
raise NotThisMethod("no .git directory")
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
    Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"]}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree"}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version"}
| gpl-3.0 |
justincely/rolodex | setup.py | 1 | 2102 | from setuptools import setup, find_packages
setup(
name = 'cos_monitoring',
version = '0.0.1',
description = 'Provide utilities and monotiring of cos data',
author = 'Justin Ely',
author_email = '[email protected]',
keywords = ['astronomy'],
classifiers = ['Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 1 - Planning',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Scientific/Engineering :: Physics',
'Topic :: Software Development :: Libraries :: Python Modules'],
packages = find_packages(),
requires = ['numpy', 'scipy', 'astropy', 'matplotlib'],
entry_points = {'console_scripts': ['clean_slate=cos_monitoring.database:clean_slate',
'cm_ingest=cos_monitoring.database:ingest_all',
'cm_monitors=cos_monitoring.database:run_all_monitors',
'create_master_csv=scripts.create_master_csv:main',
'cosmo_retrieval=cos_monitoring.retrieval.run_cosmo_retrieval',
'cm_reports=cos_monitoring.database.report:query_all',
'cm_delete=cos_monitoring.database.database:cm_delete',
'cm_describe=cos_monitoring.database.database:cm_describe',
'cm_tot_gain=cos_monitoring.cci.gainmap:make_all_gainmaps_entry'],
},
install_requires = ['setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'sqlalchemy>=1.0.12',
'pymysql',
'matplotlib',
'scipy',
'fitsio',
'psutil',
'beautifulsoup4',
'pyfastcopy']
)
| bsd-3-clause |
mhostetter/gnuradio | gr-filter/python/filter/qa_fir_filter.py | 47 | 6862 | #!/usr/bin/env python
#
# Copyright 2008,2010,2012,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from gnuradio import gr, gr_unittest, filter, blocks
def fir_filter(x, taps, decim=1):
y = []
x2 = (len(taps)-1)*[0,] + x
for i in range(0, len(x), decim):
yi = 0
for j in range(len(taps)):
yi += taps[len(taps)-1-j] * x2[i+j]
y.append(yi)
return y
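# (Added note) This pure-Python reference models the blocks under test: the
# input is left-padded with len(taps)-1 zeros, each output sample is the dot
# product of the reversed taps with the most recent input window, and decim
# keeps every decim-th output, mirroring GNU Radio's fir_filter_xxx blocks.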
class test_filter(gr_unittest.TestCase):
def setUp(self):
self.tb = gr.top_block ()
def tearDown(self):
self.tb = None
def test_fir_filter_fff_001(self):
decim = 1
taps = 20*[0.5, 0.5]
src_data = 40*[1, 2, 3, 4]
expected_data = fir_filter(src_data, taps, decim)
src = blocks.vector_source_f(src_data)
op = filter.fir_filter_fff(decim, taps)
dst = blocks.vector_sink_f()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertFloatTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_fff_002(self):
decim = 4
taps = 20*[0.5, 0.5]
src_data = 40*[1, 2, 3, 4]
expected_data = fir_filter(src_data, taps, decim)
src = blocks.vector_source_f(src_data)
op = filter.fir_filter_fff(decim, taps)
dst = blocks.vector_sink_f()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertFloatTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_ccf_001(self):
decim = 1
taps = 20*[0.5, 0.5]
src_data = 40*[1+1j, 2+2j, 3+3j, 4+4j]
expected_data = fir_filter(src_data, taps, decim)
src = blocks.vector_source_c(src_data)
op = filter.fir_filter_ccf(decim, taps)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_ccf_002(self):
decim = 4
taps = 20*[0.5, 0.5]
src_data = 40*[1+1j, 2+2j, 3+3j, 4+4j]
expected_data = fir_filter(src_data, taps, decim)
src = blocks.vector_source_c(src_data)
op = filter.fir_filter_ccf(decim, taps)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_ccc_001(self):
decim = 1
taps = 20*[0.5+1j, 0.5+1j]
src_data = 40*[1+1j, 2+2j, 3+3j, 4+4j]
expected_data = fir_filter(src_data, taps, decim)
src = blocks.vector_source_c(src_data)
op = filter.fir_filter_ccc(decim, taps)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_ccc_002(self):
decim = 1
taps = filter.firdes.low_pass(1, 1, 0.1, 0.01)
src_data = 10*[1+1j, 2+2j, 3+3j, 4+4j]
expected_data = fir_filter(src_data, taps, decim)
src = blocks.vector_source_c(src_data)
op = filter.fir_filter_ccc(decim, taps)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_ccc_003(self):
decim = 4
taps = 20*[0.5+1j, 0.5+1j]
src_data = 40*[1+1j, 2+2j, 3+3j, 4+4j]
expected_data = fir_filter(src_data, taps, decim)
src = blocks.vector_source_c(src_data)
op = filter.fir_filter_ccc(decim, taps)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_scc_001(self):
decim = 1
taps = 20*[0.5+1j, 0.5+1j]
src_data = 40*[1, 2, 3, 4]
expected_data = fir_filter(src_data, taps, decim)
src = blocks.vector_source_s(src_data)
op = filter.fir_filter_scc(decim, taps)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_scc_002(self):
decim = 4
taps = 20*[0.5+1j, 0.5+1j]
src_data = 40*[1, 2, 3, 4]
expected_data = fir_filter(src_data, taps, decim)
src = blocks.vector_source_s(src_data)
op = filter.fir_filter_scc(decim, taps)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_fsf_001(self):
decim = 1
taps = 20*[0.5, 0.5]
src_data = 40*[1, 2, 3, 4]
expected_data = fir_filter(src_data, taps, decim)
expected_data = [int(e) for e in expected_data]
src = blocks.vector_source_f(src_data)
op = filter.fir_filter_fsf(decim, taps)
dst = blocks.vector_sink_s()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_fsf_002(self):
decim = 4
taps = 20*[0.5, 0.5]
src_data = 40*[1, 2, 3, 4]
expected_data = fir_filter(src_data, taps, decim)
expected_data = [int(e) for e in expected_data]
src = blocks.vector_source_f(src_data)
op = filter.fir_filter_fsf(decim, taps)
dst = blocks.vector_sink_s()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
if __name__ == '__main__':
gr_unittest.run(test_filter, "test_filter.xml")
| gpl-3.0 |
pycroscopy/pycroscopy | pycroscopy/processing/svd_utils.py | 1 | 20291 | # -*- coding: utf-8 -*-
"""
USID utilities for performing randomized singular value decomposition and reconstructing results
Created on Mon Mar 28 09:45:08 2016
@author: Suhas Somnath, Chris Smith
"""
from __future__ import division, print_function, absolute_import
import time
from multiprocessing import cpu_count
import numpy as np
from sklearn.utils import gen_batches
from sklearn.utils.extmath import randomized_svd
from sidpy.hdf.reg_ref import get_indices_for_region_ref, create_region_reference
from sidpy.hdf.hdf_utils import get_attr, write_simple_attrs, copy_attributes
from sidpy.proc.comp_utils import get_available_memory
from sidpy.base.string_utils import format_time
from sidpy.hdf.dtype_utils import check_dtype, stack_real_to_target_dtype
from pyUSID.processing.process import Process
from .proc_utils import get_component_slice
from pyUSID.io.hdf_utils import find_results_groups, \
reshape_to_n_dims, write_main_dataset, create_results_group, \
create_indexed_group, find_dataset
from pyUSID import Dimension
from pyUSID.io.anc_build_utils import calc_chunks
from pyUSID import USIDataset
import h5py
from matplotlib import pyplot as plt
from pyUSID.viz import plot_utils
class SVD(Process):
"""
This class provides a file-wrapper around the :meth:`sklearn.utils.extmath.randomized_svd` function.
In other words, it extracts and then reformats the data present in the provided :class:`pyUSID.USIDataset` object,
performs the randomized SVD operation and writes the results back to the USID HDF5 file after
    formatting the results in a USID-compliant manner.
"""
def __init__(self, h5_main, num_components=None, **kwargs):
"""
Perform the SVD decomposition on the selected dataset and write the results to h5 file.
Parameters
----------
h5_main : :class:`pyUSID.USIDataset` object
USID Main HDF5 dataset that will be decomposed
num_components : int, optional
Number of components to decompose h5_main into. Default None.
h5_target_group : h5py.Group, optional. Default = None
Location where to look for existing results and to place newly
computed results. Use this kwarg if the results need to be written
to a different HDF5 file. By default, this value is set to the
parent group containing `h5_main`
kwargs
Arguments to be sent to Process
"""
super(SVD, self).__init__(h5_main, 'SVD', **kwargs)
'''
Calculate the size of the main data in memory and compare to max_mem
We use the minimum of the actual dtype's itemsize and float32 since we
don't want to read it in yet and do the proper type conversions.
'''
n_samples, n_features = h5_main.shape
self.data_transform_func, is_complex, is_compound, n_features, type_mult = check_dtype(h5_main)
if num_components is None:
num_components = min(n_samples, n_features)
else:
num_components = min(n_samples, n_features, num_components)
self.num_components = num_components
# Check that we can actually compute the SVD with the selected number of components
self._check_available_mem()
self.parms_dict = {'num_components': num_components}
self.duplicate_h5_groups, self.partial_h5_groups = self._check_for_duplicates()
# supercharge h5_main!
self.h5_main = USIDataset(self.h5_main)
self.__u = None
self.__v = None
self.__s = None
def test(self, override=False):
"""
        Applies randomized SVD to the dataset. This function does NOT write results to the hdf5 file. Call compute() to
write to the file. Handles complex, compound datasets such that the V matrix is of the same data-type as the
input matrix.
Parameters
----------
override : bool, optional. default = False
Set to true to recompute results if prior results are available. Else, returns existing results
Returns
-------
U : :class:`numpy.ndarray`
Abundance matrix
S : :class:`numpy.ndarray`
variance vector
V : :class:`numpy.ndarray`
eigenvector matrix
"""
'''
        Check if a number of components has been set and ensure that the number is less than
the minimum axis length of the data. If both conditions are met, use fsvd. If not
use the regular svd.
C.Smith -- We might need to put a lower limit on num_comps in the future. I don't
know enough about svd to be sure.
'''
if not override:
if isinstance(self.duplicate_h5_groups, list) and len(self.duplicate_h5_groups) > 0:
self.h5_results_grp = self.duplicate_h5_groups[-1]
print('Returning previously computed results from: {}'.format(self.h5_results_grp.name))
print('set the "override" flag to True to recompute results')
return reshape_to_n_dims(self.h5_results_grp['U'])[0], self.h5_results_grp['S'][()], \
reshape_to_n_dims(self.h5_results_grp['V'])[0]
self.h5_results_grp = None
t1 = time.time()
self.__u, self.__s, self.__v = randomized_svd(self.data_transform_func(self.h5_main), self.num_components,
n_iter=3)
self.__v = stack_real_to_target_dtype(self.__v, self.h5_main.dtype)
print('Took {} to compute randomized SVD'.format(format_time(time.time() - t1)))
u_mat, success = reshape_to_n_dims(self.__u, h5_pos=self.h5_main.h5_pos_inds,
h5_spec=np.expand_dims(np.arange(self.__u.shape[1]), axis=0))
if not success:
raise ValueError('Could not reshape U to N-Dimensional dataset! Error:' + success)
# When the source dataset has a singular valued spectroscopic dimension
# stack_real_to_target causes V to lose all its dimensions
if self.__v.ndim == 0:
# However, we want V to be 2D:
self.__v = np.atleast_2d(self.__v)
v_mat, success = reshape_to_n_dims(self.__v, h5_pos=np.expand_dims(np.arange(self.__u.shape[1]), axis=1),
h5_spec=self.h5_main.h5_spec_inds)
if not success:
raise ValueError('Could not reshape V to N-Dimensional dataset! Error:' + success)
return u_mat, self.__s, v_mat
def compute(self, override=False):
"""
        Computes SVD (by calling test() if it has not already been called) and writes results to file.
Consider calling test() to check results before writing to file. Results are deleted from memory
upon writing to the HDF5 file
Parameters
----------
override : bool, optional. default = False
Set to true to recompute results if prior results are available. Else, returns existing results
Returns
-------
h5_results_grp : :class:`h5py.Group` object
HDF5 Group containing all the results
"""
if self.__u is None and self.__v is None and self.__s is None:
self.test(override=override)
if self.h5_results_grp is None:
self._write_results_chunk()
self.delete_results()
h5_group = self.h5_results_grp
return h5_group
def delete_results(self):
"""
Deletes results from memory.
"""
del self.__u, self.__s, self.__v
self.__u = None
self.__v = None
self.__s = None
def _write_results_chunk(self):
"""
Writes the provided SVD results to file
Parameters
----------
"""
comp_dim = Dimension('Principal Component', 'a. u.', len(self.__s))
h5_svd_group = create_results_group(self.h5_main, self.process_name,
h5_parent_group=self._h5_target_group)
self.h5_results_grp = h5_svd_group
self._write_source_dset_provenance()
write_simple_attrs(h5_svd_group, self.parms_dict)
write_simple_attrs(h5_svd_group, {'svd_method': 'sklearn-randomized'})
h5_u = write_main_dataset(h5_svd_group, np.float32(self.__u), 'U', 'Abundance', 'a.u.', None, comp_dim,
h5_pos_inds=self.h5_main.h5_pos_inds, h5_pos_vals=self.h5_main.h5_pos_vals,
dtype=np.float32, chunks=calc_chunks(self.__u.shape, np.float32(0).itemsize))
# print(get_attr(self.h5_main, 'quantity')[0])
h5_v = write_main_dataset(h5_svd_group, self.__v, 'V', get_attr(self.h5_main, 'quantity')[0],
'a.u.', comp_dim, None, h5_spec_inds=self.h5_main.h5_spec_inds,
h5_spec_vals=self.h5_main.h5_spec_vals,
chunks=calc_chunks(self.__v.shape, self.h5_main.dtype.itemsize))
# No point making this 1D dataset a main dataset
h5_s = h5_svd_group.create_dataset('S', data=np.float32(self.__s))
'''
Check h5_main for plot group references.
Copy them into V if they exist
'''
for key in self.h5_main.attrs.keys():
if '_Plot_Group' not in key:
continue
ref_inds = get_indices_for_region_ref(self.h5_main, self.h5_main.attrs[key], return_method='corners')
ref_inds = ref_inds.reshape([-1, 2, 2])
ref_inds[:, 1, 0] = h5_v.shape[0] - 1
svd_ref = create_region_reference(h5_v, ref_inds)
h5_v.attrs[key] = svd_ref
# Marking completion:
self._status_dset_name = 'completed_positions'
self._h5_status_dset = h5_svd_group.create_dataset(self._status_dset_name,
data=np.ones(self.h5_main.shape[0], dtype=np.uint8))
# keeping legacy option:
h5_svd_group.attrs['last_pixel'] = self.h5_main.shape[0]
def _check_available_mem(self):
"""
Check that there is enough memory to perform the SVD decomposition.
Returns
-------
sufficient_mem : bool
            True if enough memory found, False otherwise.
"""
if self.verbose:
print('Checking memory availability.')
n_samples, n_features = self.h5_main.shape
s_mem_per_comp = np.float32(0).itemsize
u_mem_per_comp = np.float32(0).itemsize * n_samples
v_mem_per_comp = self.h5_main.dtype.itemsize * n_features
mem_per_comp = s_mem_per_comp + u_mem_per_comp + v_mem_per_comp
max_mem = get_available_memory()
avail_mem = 0.75 * max_mem
free_mem = avail_mem - self.h5_main.__sizeof__()
if free_mem <= 0:
error_message = 'Cannot load main dataset into memory.\n' + \
'Available memory is {}. Dataset needs {}.'.format(avail_mem,
self.h5_main.__sizeof__())
raise MemoryError(error_message)
if self.verbose:
print('Memory available for SVD is {}.'.format(free_mem))
print('Memory needed per component is {}.'.format(mem_per_comp))
cant_svd = (free_mem - self.num_components * mem_per_comp) <= 0
if cant_svd:
            # np.floor() has no integer loop, so cast the result instead of passing dtype=int
            max_comps = int(np.floor(free_mem / mem_per_comp))
error_message = 'Not enough free memory for performing SVD with requested number of parameters.\n' + \
'Maximum possible parameters is {}.'.format(max_comps)
raise MemoryError(error_message)
###############################################################################
def simplified_kpca(kpca, source_data):
"""
Performs kernel PCA on the provided dataset and returns the familiar
eigenvector, eigenvalue, and scree matrices.
Note that the positions in the eigenvalues may need to be transposed
Parameters
----------
kpca : KernelPCA object
configured Kernel PCA object ready to perform analysis
source_data : 2D numpy array
Data arranged as [iteration, features] example - [position, time]
Returns
-------
eigenvalues : 2D numpy array
Eigenvalues in the original space arranged as [component,iteration]
scree : 1D numpy array
S component
eigenvector : 2D numpy array
Eigenvectors in the original space arranged as [component,features]
"""
X_kpca = kpca.fit(source_data.T)
eigenvectors = X_kpca.alphas_.T
eigenvalues = X_kpca.fit_transform(source_data)
# kpca_explained_variance = np.var(kpca.fit_transform(source_data), axis=0)
# information_content = kpca_explained_variance / np.sum(kpca_explained_variance)
scree = kpca.lambdas_
return eigenvalues, scree, eigenvectors
def rebuild_svd(h5_main, components=None, cores=None, max_RAM_mb=1024):
"""
Rebuild the Image from the SVD results on the windows
Optionally, only use components less than n_comp.
Parameters
----------
h5_main : hdf5 Dataset
dataset which SVD was performed on
components : {int, iterable of int, slice} optional
Defines which components to keep
Default - None, all components kept
Input Types
integer : Components less than the input will be kept
length 2 iterable of integers : Integers define start and stop of component slice to retain
other iterable of integers or slice : Selection of component indices to retain
cores : int, optional
How many cores should be used to rebuild
Default - None, all but 2 cores will be used, min 1
max_RAM_mb : int, optional
        Maximum amount of memory to use when rebuilding, in Mb.
Default - 1024Mb
Returns
-------
rebuilt_data : HDF5 Dataset
the rebuilt dataset
"""
comp_slice, num_comps = get_component_slice(components, total_components=h5_main.shape[1])
if isinstance(comp_slice, np.ndarray):
comp_slice = list(comp_slice)
dset_name = h5_main.name.split('/')[-1]
# Ensuring that at least one core is available for use / 2 cores are available for other use
max_cores = max(1, cpu_count() - 2)
# print('max_cores',max_cores)
if cores is not None:
cores = min(round(abs(cores)), max_cores)
else:
cores = max_cores
max_memory = min(max_RAM_mb * 1024 ** 2, 0.75 * get_available_memory())
if cores != 1:
max_memory = int(max_memory / 2)
'''
Get the handles for the SVD results
'''
try:
h5_svd_group = find_results_groups(h5_main, 'SVD')[-1]
h5_S = h5_svd_group['S']
h5_U = h5_svd_group['U']
h5_V = h5_svd_group['V']
except KeyError:
raise KeyError('SVD Results for {dset} were not found.'.format(dset=dset_name))
except:
raise
func, is_complex, is_compound, n_features, type_mult = check_dtype(h5_V)
'''
Calculate the size of a single batch that will fit in the available memory
'''
n_comps = h5_S[comp_slice].size
mem_per_pix = (h5_U.dtype.itemsize + h5_V.dtype.itemsize * h5_V.shape[1]) * n_comps
fixed_mem = h5_main.size * h5_main.dtype.itemsize
if cores is None:
free_mem = max_memory - fixed_mem
else:
free_mem = max_memory * 2 - fixed_mem
batch_size = int(round(float(free_mem) / mem_per_pix))
batch_slices = gen_batches(h5_U.shape[0], batch_size)
print('Reconstructing in batches of {} positions.'.format(batch_size))
    print('Batches should be {} Mb each.'.format(mem_per_pix * batch_size / 1024.0 ** 2))
'''
Loop over all batches.
'''
ds_V = np.dot(np.diag(h5_S[comp_slice]), func(h5_V[comp_slice, :]))
rebuild = np.zeros((h5_main.shape[0], ds_V.shape[1]))
for ibatch, batch in enumerate(batch_slices):
rebuild[batch, :] += np.dot(h5_U[batch, comp_slice], ds_V)
rebuild = stack_real_to_target_dtype(rebuild, h5_V.dtype)
print('Completed reconstruction of data from SVD results. Writing to file.')
'''
Create the Group and dataset to hold the rebuild data
'''
rebuilt_grp = create_indexed_group(h5_svd_group, 'Rebuilt_Data')
h5_rebuilt = write_main_dataset(rebuilt_grp, rebuild, 'Rebuilt_Data',
get_attr(h5_main, 'quantity'), get_attr(h5_main, 'units'),
None, None,
h5_pos_inds=h5_main.h5_pos_inds, h5_pos_vals=h5_main.h5_pos_vals,
h5_spec_inds=h5_main.h5_spec_inds, h5_spec_vals=h5_main.h5_spec_vals,
chunks=h5_main.chunks, compression=h5_main.compression)
if isinstance(comp_slice, slice):
rebuilt_grp.attrs['components_used'] = '{}-{}'.format(comp_slice.start, comp_slice.stop)
else:
rebuilt_grp.attrs['components_used'] = components
copy_attributes(h5_main, h5_rebuilt, skip_refs=False)
h5_main.file.flush()
print('Done writing reconstructed data to file.')
return h5_rebuilt
def plot_svd(h5_main, savefig=False, num_plots = 16, **kwargs):
'''
    Replots the SVD showing the scree, abundance maps, and eigenvectors.
If h5_main is a Dataset, it will default to the most recent SVD group from that
Dataset.
If h5_main is the results group, then it will plot the values for that group.
Parameters
----------
h5_main : USIDataset or h5py Dataset or h5py Group
savefig : bool, optional
Saves the figures to disk with some default names
num_plots : int
Default number of eigenvectors and abundance plots to show
kwargs : dict, optional
keyword arguments for svd filtering
Returns
-------
None
'''
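    # Hedged usage sketch (``h5_main`` is assumed to be a USID main dataset on
    # which SVD has already been computed):
    #
    #     plot_svd(h5_main, savefig=True, num_plots=9)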
if isinstance(h5_main, h5py.Group):
_U = find_dataset(h5_main, 'U')[-1]
_V = find_dataset(h5_main, 'V')[-1]
units = 'arbitrary (a.u.)'
h5_spec_vals = np.arange(_V.shape[1])
h5_svd_group = _U.parent
else:
h5_svd_group = find_results_groups(h5_main, 'SVD')[-1]
units = h5_main.attrs['quantity']
h5_spec_vals = h5_main.get_spec_values('Time')
h5_U = h5_svd_group['U']
h5_V = h5_svd_group['V']
h5_S = h5_svd_group['S']
_U = USIDataset(h5_U)
[num_rows, num_cols] = _U.pos_dim_sizes
    # use num_plots (rather than a hard-coded 16) so the argument is honoured
    abun_maps = np.reshape(h5_U[:, :num_plots], (num_rows, num_cols, -1))
    eigen_vecs = h5_V[:num_plots, :]
skree_sum = np.zeros(h5_S.shape)
for i in range(h5_S.shape[0]):
skree_sum[i] = np.sum(h5_S[:i])/np.sum(h5_S)
plt.figure()
plt.plot(skree_sum, 'bo')
plt.title('Cumulative Variance')
plt.xlabel('Total Components')
plt.ylabel('Total variance ratio (a.u.)')
if savefig:
plt.savefig('Cumulative_variance_plot.png')
fig_skree, axes = plot_utils.plot_scree(h5_S, title='Scree plot')
fig_skree.tight_layout()
if savefig:
plt.savefig('Scree_plot.png')
fig_abun, axes = plot_utils.plot_map_stack(abun_maps, num_comps=num_plots, title='SVD Abundance Maps',
color_bar_mode='single', cmap='inferno', reverse_dims=True,
fig_mult=(3.5,3.5), facecolor='white', **kwargs)
fig_abun.tight_layout()
if savefig:
plt.savefig('Abundance_maps.png')
fig_eigvec, axes = plot_utils.plot_curves(h5_spec_vals*1e3, eigen_vecs, use_rainbow_plots=False,
x_label='Time (ms)', y_label=units,
num_plots=num_plots, subtitle_prefix='Component',
title='SVD Eigenvectors', evenly_spaced=False,
**kwargs)
fig_eigvec.tight_layout()
if savefig:
plt.savefig('Eigenvectors.png')
return | mit |
U-MA/mal | rpython/step1_read_print.py | 50 | 1118 | #import sys, traceback
import mal_readline
import mal_types as types
import reader, printer
# read
def READ(str):
return reader.read_str(str)
# eval
def EVAL(ast, env):
#print("EVAL %s" % printer._pr_str(ast))
return ast
# print
def PRINT(exp):
return printer._pr_str(exp)
# repl
def REP(str):
return PRINT(EVAL(READ(str), {}))
def entry_point(argv):
#mal_readline.init()
while True:
try:
line = mal_readline.readline("user> ")
if line == "": continue
print(REP(line))
except EOFError as e:
break
except reader.Blank:
continue
except types.MalException as e:
print(u"Error: %s" % printer._pr_str(e.object, False))
except Exception as e:
print("Error: %s" % e)
#print("".join(traceback.format_exception(*sys.exc_info())))
return 0
# _____ Define and setup target ___
def target(*args):
return entry_point
# Just run entry_point if not RPython compilation
import sys
if not sys.argv[0].endswith('rpython'):
entry_point(sys.argv)
| mpl-2.0 |
codeworldprodigy/lab2 | lib/flask/flask/templating.py | 783 | 4707 | # -*- coding: utf-8 -*-
"""
flask.templating
~~~~~~~~~~~~~~~~
Implements the bridge to Jinja2.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import posixpath
from jinja2 import BaseLoader, Environment as BaseEnvironment, \
TemplateNotFound
from .globals import _request_ctx_stack, _app_ctx_stack
from .signals import template_rendered
from .module import blueprint_is_module
from ._compat import itervalues, iteritems
def _default_template_ctx_processor():
"""Default template context processor. Injects `request`,
`session` and `g`.
"""
reqctx = _request_ctx_stack.top
appctx = _app_ctx_stack.top
rv = {}
if appctx is not None:
rv['g'] = appctx.g
if reqctx is not None:
rv['request'] = reqctx.request
rv['session'] = reqctx.session
return rv
class Environment(BaseEnvironment):
"""Works like a regular Jinja2 environment but has some additional
knowledge of how Flask's blueprint works so that it can prepend the
name of the blueprint to referenced templates if necessary.
"""
def __init__(self, app, **options):
if 'loader' not in options:
options['loader'] = app.create_global_jinja_loader()
BaseEnvironment.__init__(self, **options)
self.app = app
class DispatchingJinjaLoader(BaseLoader):
"""A loader that looks for templates in the application and all
the blueprint folders.
"""
def __init__(self, app):
self.app = app
def get_source(self, environment, template):
for loader, local_name in self._iter_loaders(template):
try:
return loader.get_source(environment, local_name)
except TemplateNotFound:
pass
raise TemplateNotFound(template)
def _iter_loaders(self, template):
loader = self.app.jinja_loader
if loader is not None:
yield loader, template
# old style module based loaders in case we are dealing with a
# blueprint that is an old style module
try:
module, local_name = posixpath.normpath(template).split('/', 1)
blueprint = self.app.blueprints[module]
if blueprint_is_module(blueprint):
loader = blueprint.jinja_loader
if loader is not None:
yield loader, local_name
except (ValueError, KeyError):
pass
for blueprint in itervalues(self.app.blueprints):
if blueprint_is_module(blueprint):
continue
loader = blueprint.jinja_loader
if loader is not None:
yield loader, template
def list_templates(self):
result = set()
loader = self.app.jinja_loader
if loader is not None:
result.update(loader.list_templates())
for name, blueprint in iteritems(self.app.blueprints):
loader = blueprint.jinja_loader
if loader is not None:
for template in loader.list_templates():
prefix = ''
if blueprint_is_module(blueprint):
prefix = name + '/'
result.add(prefix + template)
return list(result)
def _render(template, context, app):
"""Renders the template and fires the signal"""
rv = template.render(context)
template_rendered.send(app, template=template, context=context)
return rv
def render_template(template_name_or_list, **context):
"""Renders a template from the template folder with the given
context.
:param template_name_or_list: the name of the template to be
rendered, or an iterable with template names
the first one existing will be rendered
:param context: the variables that should be available in the
context of the template.
"""
ctx = _app_ctx_stack.top
ctx.app.update_template_context(context)
return _render(ctx.app.jinja_env.get_or_select_template(template_name_or_list),
context, ctx.app)
def render_template_string(source, **context):
"""Renders a template from the given template source string
with the given context.
:param source: the sourcecode of the template to be
rendered
:param context: the variables that should be available in the
context of the template.
"""
ctx = _app_ctx_stack.top
ctx.app.update_template_context(context)
return _render(ctx.app.jinja_env.from_string(source),
context, ctx.app)
| apache-2.0 |
xe1gyq/nuupxe | modules/aprstracker.py | 1 | 3741 | #!/usr/bin/python
import ConfigParser
import feedparser
import logging
import os
import pywapi
import string
import sys
import time
import unicodedata
from core.alive import alive
from pygeocoder import Geocoder
from core.aprsfi import AprsFi
from core.voicesynthetizer import VoiceSynthetizer
from core.phonetic import Phonetic
days = {'Monday': 'Lunes', 'Tuesday': 'Martes', 'Wednesday': 'Miercoles',
'Thursday': 'Jueves', 'Friday': 'Viernes', 'Saturday': 'Sabado',
'Sunday': 'Domingo',
}
months = {'January': 'Enero', 'February': 'Febrero', 'March': 'Marzo',
'April': 'Abril', 'May': 'Mayo', 'June': 'Junio',
'July': 'Julio', 'August': 'Agosto', 'September': 'Septiembre',
'October': 'Octubre', 'November' : 'Noviembre', 'December': 'Diciembre'
}
class AprsTracker(object):
def __init__(self, voicesynthetizer, callsign='XE1GYP-9'):
logging.info('[AprsTracker]')
self.speaker = voicesynthetizer
self.callsign = callsign
self.modulename = 'AprsTracker'
self.phonetic = Phonetic()
self.aprsfi = AprsFi()
self.conf = ConfigParser.ConfigParser()
self.path = "configuration/general.config"
self.conf.read(self.path)
def time(self, aprstime):
logging.info('[AprsTracker] Time')
weekday = days[time.strftime("%A", time.gmtime(int(aprstime)))]
day = time.strftime("%d", time.gmtime(int(aprstime))).lstrip('0')
month = months[time.strftime("%B", time.gmtime(int(aprstime)))]
year = time.strftime("%Y", time.gmtime(int(aprstime)))
return weekday, day, month, year
def localize(self):
logging.info('[AprsTracker] Localize')
self.speaker.speechit("Localizacion de estaciones a traves de a p r s punto f i")
self.aprsfi.callsignset(self.callsign)
self.aprsfi.dataset('loc')
data = self.aprsfi.query()
logging.info(data)
station = "Estacion " + self.callsign
stationdecoded = "Estacion " + ' '.join(self.phonetic.decode(self.callsign))
if data.get('entries'):
for entry in data['entries']:
weekday, day, month, year = self.time(entry['lasttime'])
message = ", Ultima vez visto " + weekday + ' ' + day + ' de ' + month + ' del ' + year
try:
message = message + ", Velocidad " + str(entry['speed']) + " Km/h"
except:
pass
try:
message = message + ", Altitud " + str(entry['altitude']) + " metros"
except:
pass
results = Geocoder.reverse_geocode(float(entry['lat']), float(entry['lng']))
logging.info(results)
try:
message = message + ", Calle " + results[0].route
message = message + ", Colonia " + results[0].political
if results[0].administrative_area_level_2:
message = message + ", Municipio " + results[0].administrative_area_level_2
elif results[0].locality:
message = message + ", Municipio " + results[0].locality
message = message + ", " + results[0].administrative_area_level_1
message = message + ", " + results[0].country
except:
pass
speechmessage = stationdecoded + message
self.speaker.speechit(speechmessage)
modulemessage = station + message
alive(modulename=self.modulename, modulemessage=modulemessage)
else:
self.speaker.speechit(stationdecoded + " no ha reportado ubicacion!")
# End of File
| apache-2.0 |
nvoron23/avos | openstack_dashboard/dashboards/admin/volumes/volume_types/qos_specs/forms.py | 14 | 2804 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
class CreateKeyValuePair(forms.SelfHandlingForm):
    # this is for creating a spec key-value pair for an existing QOS Spec
key = forms.CharField(max_length=255, label=_("Key"))
value = forms.CharField(max_length=255, label=_("Value"))
def handle(self, request, data):
qos_spec_id = self.initial['qos_spec_id']
try:
# first retrieve current value of specs
specs = api.cinder.qos_spec_get(request, qos_spec_id)
# now add new key-value pair to list of specs
specs.specs[data['key']] = data['value']
api.cinder.qos_spec_set_keys(request,
qos_spec_id,
specs.specs)
msg = _('Created spec "%s".') % data['key']
messages.success(request, msg)
return True
except Exception:
exceptions.handle(request,
_("Unable to create spec."))
return False
class EditKeyValuePair(forms.SelfHandlingForm):
value = forms.CharField(max_length=255, label=_("Value"))
# update the backend with the new qos spec value
def handle(self, request, data):
key = self.initial['key']
qos_spec_id = self.initial['qos_spec_id']
# build up new 'specs' object with all previous values plus new value
try:
# first retrieve current value of specs
specs = api.cinder.qos_spec_get_keys(request,
qos_spec_id,
raw=True)
specs.specs[key] = data['value']
api.cinder.qos_spec_set_keys(request,
qos_spec_id,
specs.specs)
msg = _('Saved spec "%s".') % key
messages.success(request, msg)
return True
except Exception:
exceptions.handle(request,
_("Unable to edit spec."))
return False | apache-2.0 |
stonebig/numba | numba/tests/test_looplifting.py | 2 | 17457 | from io import StringIO
import numpy as np
from numba.core import types
from numba.core.compiler import compile_isolated, Flags
from numba.tests.support import TestCase, tag, MemoryLeakMixin
import unittest
looplift_flags = Flags()
looplift_flags.enable_pyobject = True
looplift_flags.enable_looplift = True
pyobject_looplift_flags = looplift_flags.copy()
pyobject_looplift_flags.enable_pyobject_looplift = True
def lift1(x):
# Outer needs object mode because of np.empty()
a = np.empty(3)
for i in range(a.size):
# Inner is nopython-compliant
a[i] = x
return a
def lift2(x):
# Outer needs object mode because of np.empty()
a = np.empty((3, 4))
for i in range(a.shape[0]):
for j in range(a.shape[1]):
# Inner is nopython-compliant
a[i, j] = x
return a
def lift3(x):
# Output variable from the loop
_ = object()
a = np.arange(5, dtype=np.int64)
c = 0
for i in range(a.shape[0]):
c += a[i] * x
return c
def lift4(x):
# Output two variables from the loop
_ = object()
a = np.arange(5, dtype=np.int64)
c = 0
d = 0
for i in range(a.shape[0]):
c += a[i] * x
d += c
return c + d
def lift5(x):
_ = object()
a = np.arange(4)
for i in range(a.shape[0]):
# Inner has a break statement
if i > 2:
break
return a
def lift_gen1(x):
# Outer needs object mode because of np.empty()
a = np.empty(3)
yield 0
for i in range(a.size):
# Inner is nopython-compliant
a[i] = x
yield np.sum(a)
def lift_issue2561():
np.empty(1) # This forces objectmode because no nrt
for i in range(10):
for j in range(10):
return 1
return 2
def reject1(x):
a = np.arange(4)
for i in range(a.shape[0]):
# Inner returns a variable from outer "scope" => cannot loop-lift
return a
return a
def reject_gen1(x):
_ = object()
a = np.arange(4)
for i in range(a.shape[0]):
# Inner is a generator => cannot loop-lift
yield a[i]
def reject_gen2(x):
_ = object()
a = np.arange(3)
for i in range(a.size):
# Middle has a yield => cannot loop-lift
res = a[i] + x
for j in range(i):
# Inner is nopython-compliant, but the current algorithm isn't
# able to separate it.
res = res ** 2
yield res
def reject_npm1(x):
a = np.empty(3, dtype=np.int32)
for i in range(a.size):
# Inner uses object() => cannot loop-lift
_ = object()
a[i] = np.arange(i + 1)[i]
return a
class TestLoopLifting(MemoryLeakMixin, TestCase):
def try_lift(self, pyfunc, argtypes):
cres = compile_isolated(pyfunc, argtypes,
flags=looplift_flags)
# One lifted loop
self.assertEqual(len(cres.lifted), 1)
return cres
def assert_lifted_native(self, cres):
# Check if we have lifted in nopython mode
jitloop = cres.lifted[0]
[loopcres] = jitloop.overloads.values()
self.assertTrue(loopcres.fndesc.native) # Lifted function is native
def check_lift_ok(self, pyfunc, argtypes, args):
"""
Check that pyfunc can loop-lift even in nopython mode.
"""
cres = self.try_lift(pyfunc, argtypes)
expected = pyfunc(*args)
got = cres.entry_point(*args)
self.assert_lifted_native(cres)
# Check return values
self.assertPreciseEqual(expected, got)
def check_lift_generator_ok(self, pyfunc, argtypes, args):
"""
Check that pyfunc (a generator function) can loop-lift even in
nopython mode.
"""
cres = self.try_lift(pyfunc, argtypes)
expected = list(pyfunc(*args))
got = list(cres.entry_point(*args))
self.assert_lifted_native(cres)
# Check return values
self.assertPreciseEqual(expected, got)
def check_no_lift(self, pyfunc, argtypes, args):
"""
Check that pyfunc can't loop-lift.
"""
cres = compile_isolated(pyfunc, argtypes,
flags=looplift_flags)
self.assertFalse(cres.lifted)
expected = pyfunc(*args)
got = cres.entry_point(*args)
# Check return values
self.assertPreciseEqual(expected, got)
def check_no_lift_generator(self, pyfunc, argtypes, args):
"""
Check that pyfunc (a generator function) can't loop-lift.
"""
cres = compile_isolated(pyfunc, argtypes,
flags=looplift_flags)
self.assertFalse(cres.lifted)
expected = list(pyfunc(*args))
got = list(cres.entry_point(*args))
self.assertPreciseEqual(expected, got)
def check_no_lift_nopython(self, pyfunc, argtypes, args):
"""
Check that pyfunc will fail loop-lifting if pyobject mode
is disabled inside the loop, succeed otherwise.
"""
cres = compile_isolated(pyfunc, argtypes,
flags=looplift_flags)
self.assertTrue(cres.lifted)
with self.assertTypingError():
cres.entry_point(*args)
cres = compile_isolated(pyfunc, argtypes,
flags=pyobject_looplift_flags)
self.assertTrue(cres.lifted)
expected = pyfunc(*args)
got = cres.entry_point(*args)
self.assertPreciseEqual(expected, got)
def test_lift1(self):
self.check_lift_ok(lift1, (types.intp,), (123,))
def test_lift2(self):
self.check_lift_ok(lift2, (types.intp,), (123,))
def test_lift3(self):
self.check_lift_ok(lift3, (types.intp,), (123,))
def test_lift4(self):
self.check_lift_ok(lift4, (types.intp,), (123,))
def test_lift5(self):
self.check_lift_ok(lift5, (types.intp,), (123,))
def test_lift_issue2561(self):
self.check_no_lift(lift_issue2561, (), ())
def test_lift_gen1(self):
self.check_lift_generator_ok(lift_gen1, (types.intp,), (123,))
def test_reject1(self):
self.check_no_lift(reject1, (types.intp,), (123,))
def test_reject_gen1(self):
self.check_no_lift_generator(reject_gen1, (types.intp,), (123,))
def test_reject_gen2(self):
self.check_no_lift_generator(reject_gen2, (types.intp,), (123,))
def test_reject_npm1(self):
self.check_no_lift_nopython(reject_npm1, (types.intp,), (123,))
class TestLoopLiftingAnnotate(TestCase):
def test_annotate_1(self):
"""
Verify that annotation works as expected with one lifted loop
"""
from numba import jit
# dummy function to force objmode
def bar():
pass
def foo(x):
bar() # force obj
for i in range(x.size):
x[i] += 1
return x
cfoo = jit(foo)
x = np.arange(10)
xcopy = x.copy()
r = cfoo(x)
np.testing.assert_equal(r, xcopy + 1)
buf = StringIO()
cfoo.inspect_types(file=buf)
annotation = buf.getvalue()
buf.close()
self.assertIn("The function contains lifted loops", annotation)
line = foo.__code__.co_firstlineno + 2 # 2 lines down from func head
self.assertIn("Loop at line {line}".format(line=line), annotation)
self.assertIn("Has 1 overloads", annotation)
def test_annotate_2(self):
"""
Verify that annotation works as expected with two lifted loops
"""
from numba import jit
# dummy function to force objmode
def bar():
pass
def foo(x):
bar() # force obj
# first lifted loop
for i in range(x.size):
x[i] += 1
# second lifted loop
for j in range(x.size):
x[j] *= 2
return x
cfoo = jit(foo)
x = np.arange(10)
xcopy = x.copy()
r = cfoo(x)
np.testing.assert_equal(r, (xcopy + 1) * 2)
buf = StringIO()
cfoo.inspect_types(file=buf)
annotation = buf.getvalue()
buf.close()
self.assertIn("The function contains lifted loops", annotation)
line1 = foo.__code__.co_firstlineno + 3 # 3 lines down from func head
line2 = foo.__code__.co_firstlineno + 6 # 6 lines down from func head
self.assertIn("Loop at line {line}".format(line=line1), annotation)
self.assertIn("Loop at line {line}".format(line=line2), annotation)
class TestLoopLiftingInAction(MemoryLeakMixin, TestCase):
def assert_has_lifted(self, jitted, loopcount):
lifted = jitted.overloads[jitted.signatures[0]].lifted
self.assertEqual(len(lifted), loopcount)
def test_issue_734(self):
from numba import jit, void, int32, double
@jit(void(int32, double[:]), forceobj=True)
def forloop_with_if(u, a):
if u == 0:
for i in range(a.shape[0]):
a[i] = a[i] * 2.0
else:
for i in range(a.shape[0]):
a[i] = a[i] + 1.0
for u in (0, 1):
nb_a = np.arange(10, dtype='int32')
np_a = np.arange(10, dtype='int32')
forloop_with_if(u, nb_a)
forloop_with_if.py_func(u, np_a)
self.assertPreciseEqual(nb_a, np_a)
def test_issue_812(self):
from numba import jit
@jit('f8[:](f8[:])', forceobj=True)
def test(x):
res = np.zeros(len(x))
ind = 0
for ii in range(len(x)):
ind += 1
res[ind] = x[ind]
if x[ind] >= 10:
break
# Invalid loopjitting will miss the usage of `ind` in the
# following loop.
for ii in range(ind + 1, len(x)):
res[ii] = 0
return res
x = np.array([1., 4, 2, -3, 5, 2, 10, 5, 2, 6])
np.testing.assert_equal(test.py_func(x), test(x))
def test_issue_2368(self):
from numba import jit
def lift_issue2368(a, b):
s = 0
for e in a:
s += e
h = b.__hash__()
return s, h
a = np.ones(10)
b = object()
jitted = jit(lift_issue2368)
expected = lift_issue2368(a, b)
got = jitted(a, b)
self.assertEqual(expected[0], got[0])
self.assertEqual(expected[1], got[1])
jitloop = jitted.overloads[jitted.signatures[0]].lifted[0]
[loopcres] = jitloop.overloads.values()
# assert lifted function is native
self.assertTrue(loopcres.fndesc.native)
def test_no_iteration_w_redef(self):
# redefinition of res in the loop with no use of res should not
# prevent lifting
from numba import jit
@jit(forceobj=True)
def test(n):
res = 0
for i in range(n):
res = i
return res
# loop count = 1, loop lift but loop body not execute
self.assertEqual(test.py_func(-1), test(-1))
self.assert_has_lifted(test, loopcount=1)
# loop count = 1, loop will lift and will execute
self.assertEqual(test.py_func(1), test(1))
self.assert_has_lifted(test, loopcount=1)
def test_no_iteration(self):
from numba import jit
@jit(forceobj=True)
def test(n):
res = 0
for i in range(n):
res += i
return res
# loop count = 1
self.assertEqual(test.py_func(-1), test(-1))
self.assert_has_lifted(test, loopcount=1)
# loop count = 1
self.assertEqual(test.py_func(1), test(1))
self.assert_has_lifted(test, loopcount=1)
def test_define_in_loop_body(self):
# tests a definition in a loop that leaves the loop is liftable
from numba import jit
@jit(forceobj=True)
def test(n):
for i in range(n):
res = i
return res
# loop count = 1
self.assertEqual(test.py_func(1), test(1))
self.assert_has_lifted(test, loopcount=1)
def test_invalid_argument(self):
"""Test a problem caused by invalid discovery of loop argument
when a variable is used afterwards but not before.
Before the fix, this will result in::
numba.ir.NotDefinedError: 'i' is not defined
"""
from numba import jit
@jit(forceobj=True)
def test(arg):
if type(arg) == np.ndarray: # force object mode
if arg.ndim == 1:
result = 0.0
j = 0
for i in range(arg.shape[0]):
pass
else:
raise Exception
else:
result = 0.0
i, j = 0, 0
return result
arg = np.arange(10)
self.assertEqual(test.py_func(arg), test(arg))
def test_conditionally_defined_in_loop(self):
from numba import jit
@jit(forceobj=True)
def test():
x = 5
y = 0
for i in range(2):
if i > 0:
x = 6
y += x
return y, x
self.assertEqual(test.py_func(), test())
self.assert_has_lifted(test, loopcount=1)
def test_stack_offset_error_when_has_no_return(self):
from numba import jit
import warnings
def pyfunc(a):
if a:
for i in range(10):
pass
with warnings.catch_warnings():
warnings.simplefilter("error")
cfunc = jit(forceobj=True)(pyfunc)
self.assertEqual(pyfunc(True), cfunc(True))
def test_variable_scope_bug(self):
"""
https://github.com/numba/numba/issues/2179
Looplifting transformation is using the wrong version of variable `h`.
"""
from numba import jit
def bar(x):
return x
def foo(x):
h = 0.
for k in range(x):
h = h + k
h = h - bar(x)
return h
cfoo = jit(foo)
self.assertEqual(foo(10), cfoo(10))
def test_recompilation_loop(self):
"""
https://github.com/numba/numba/issues/2481
"""
from numba import jit
def foo(x, y):
# slicing to make array `x` into different layout
# to cause a new compilation of the lifted loop
A = x[::y]
c = 1
for k in range(A.size):
object() # to force objectmode and looplifting
c = c * A[::-1][k] # the slice that is failing in static_getitem
return c
cfoo = jit(foo)
# First run just works
args = np.arange(10), 1
self.assertEqual(foo(*args), cfoo(*args))
# Exactly 1 lifted loop so far
self.assertEqual(len(cfoo.overloads[cfoo.signatures[0]].lifted), 1)
lifted = cfoo.overloads[cfoo.signatures[0]].lifted[0]
# The lifted loop has 1 signature
self.assertEqual(len(lifted.signatures), 1)
# Use different argument to trigger a new compilation of the lifted loop
args = np.arange(10), -1
self.assertEqual(foo(*args), cfoo(*args))
# Ensure that is really a new overload for the lifted loop
self.assertEqual(len(lifted.signatures), 2)
def test_lift_listcomp_block0(self):
def foo(X):
[y for y in (1,)]
for x in (1,):
pass
return X
# this is not nice, if you have 2+? liftable loops with one of them
# being list comp and in block 0 and force objmode compilation is set,
# in py27 this leads to a BUILD_LIST that is a lifting candidate with an
# entry of block 0, this is a problem as the loop lift prelude would be
# written to block -1 and havoc ensues. Therefore block 0 loop lifts
# are banned under this set of circumstances.
# check all compile and execute
from numba import jit
f = jit()(foo)
f(1)
self.assertEqual(f.overloads[f.signatures[0]].lifted, ())
f = jit(forceobj=True)(foo)
f(1)
self.assertEqual(len(f.overloads[f.signatures[0]].lifted), 1)
def test_lift_objectmode_issue_4223(self):
from numba import jit
@jit
def foo(a, b, c, d, x0, y0, n):
xs, ys = np.zeros(n), np.zeros(n)
xs[0], ys[0] = x0, y0
for i in np.arange(n-1):
xs[i+1] = np.sin(a * ys[i]) + c * np.cos(a * xs[i])
ys[i+1] = np.sin(b * xs[i]) + d * np.cos(b * ys[i])
object() # ensure object mode
return xs, ys
kwargs = dict(a=1.7, b=1.7, c=0.6, d=1.2, x0=0, y0=0, n=200)
got = foo(**kwargs)
expected = foo.py_func(**kwargs)
self.assertPreciseEqual(got[0], expected[0])
        self.assertPreciseEqual(got[1], expected[1])
[lifted] = foo.overloads[foo.signatures[0]].lifted
self.assertEqual(len(lifted.nopython_signatures), 1)
if __name__ == '__main__':
unittest.main()
| bsd-2-clause |
designcc/django-ccgallery | ccgallery/tests/test_managers.py | 1 | 3771 | from decimal import Decimal
from django.test import TestCase
from ccgallery.models import Item, Category
class ManagerTestCases(TestCase):
def test_item_for_category(self):
"""the for item manager method"""
c1 = Category()
c1.slug = '1'
c1.title = '1'
c1.description = '1'
c1.status = Category.VISIBLE
c1.save()
# make the items
i1 = Item()
i1.slug = '1'
i1.title = '1'
i1.description = '1'
i1.status = Item.VISIBLE
i1.save()
i1.categories.add(c1)
i2 = Item()
i2.slug = '2'
i2.title = '2'
i2.description = '2'
i2.order = Decimal('2.00')
i2.status = Item.VISIBLE
i2.save()
i2.categories.add(c1)
        # both items visible, so for_category returns 2
self.assertEqual(2, Item.objects.for_category(c1).count())
# hidden returns 1
i2.status = Item.HIDDEN
i2.save()
self.assertEqual(1, Item.objects.for_category(c1).count())
# make the category invisible
c1.status = Category.HIDDEN
c1.save()
self.assertEqual(0, Item.objects.for_category(c1).count())
def test_category_visible(self):
"""only visible categorys are returned"""
i1 = Category()
i1.slug = '1'
i1.title = '1'
i1.description = '1'
i1.status = Category.VISIBLE
i1.save()
i2 = Category()
i2.slug = '2'
i2.title = '2'
i2.description = '2'
i2.order = Decimal('2.00')
i2.status = Category.VISIBLE
i2.save()
        # both categories visible, so visible() returns 2
self.assertEqual(2, Category.objects.visible().count())
# hidden returns 1
i2.status = Category.HIDDEN
i2.save()
self.assertEqual(1, Category.objects.visible().count())
def test_category_hidden(self):
"""only hidden categorys are returned"""
i1 = Category()
i1.slug = '1'
i1.title = '1'
i1.description = '1'
i1.status = Category.VISIBLE
i1.save()
i2 = Category()
i2.slug = '2'
i2.title = '2'
i2.description = '2'
i2.order = Decimal('2.00')
i2.status = Category.VISIBLE
i2.save()
        # nothing hidden yet, so hidden() returns 0
self.assertEqual(0, Category.objects.hidden().count())
# hidden returns 1
i2.status = Category.HIDDEN
i2.save()
self.assertEqual(1, Category.objects.hidden().count())
def test_item_visible(self):
"""only visible items are returned"""
i1 = Item()
i1.slug = '1'
i1.title = '1'
i1.description = '1'
i1.status = Item.VISIBLE
i1.save()
i2 = Item()
i2.slug = '2'
i2.title = '2'
i2.description = '2'
i2.order = Decimal('2.00')
i2.status = Item.VISIBLE
i2.save()
        # both items visible, so visible() returns 2
self.assertEqual(2, Item.objects.visible().count())
# hidden returns 1
i2.status = Item.HIDDEN
i2.save()
self.assertEqual(1, Item.objects.visible().count())
def test_item_hidden(self):
"""only hidden items are returned"""
i1 = Item()
i1.slug = '1'
i1.title = '1'
i1.description = '1'
i1.status = Item.VISIBLE
i1.save()
i2 = Item()
i2.slug = '2'
i2.title = '2'
i2.description = '2'
i2.order = Decimal('2.00')
i2.status = Item.VISIBLE
i2.save()
        # nothing hidden yet, so hidden() returns 0
self.assertEqual(0, Item.objects.hidden().count())
# hidden returns 1
i2.status = Item.HIDDEN
i2.save()
self.assertEqual(1, Item.objects.hidden().count())
| bsd-3-clause |
fnouama/intellij-community | python/lib/Lib/site-packages/django/template/response.py | 71 | 4252 | from django.http import HttpResponse
from django.template import loader, Context, RequestContext
class ContentNotRenderedError(Exception):
pass
class SimpleTemplateResponse(HttpResponse):
def __init__(self, template, context=None, mimetype=None, status=None,
content_type=None):
# It would seem obvious to call these next two members 'template' and
# 'context', but those names are reserved as part of the test Client API.
        # To avoid the name collision (and the tricky-to-debug problems it
        # would cause), we use 'template_name' and 'context_data' instead.
self.template_name = template
self.context_data = context
        # _is_rendered tracks whether the template and context have been baked into
# a final response.
self._is_rendered = False
# content argument doesn't make sense here because it will be replaced
# with rendered template so we always pass empty string in order to
# prevent errors and provide shorter signature.
super(SimpleTemplateResponse, self).__init__('', mimetype, status,
content_type)
def resolve_template(self, template):
"Accepts a template object, path-to-template or list of paths"
if isinstance(template, (list, tuple)):
return loader.select_template(template)
elif isinstance(template, basestring):
return loader.get_template(template)
else:
return template
def resolve_context(self, context):
"""Convert context data into a full Context object
(assuming it isn't already a Context object).
"""
if isinstance(context, Context):
return context
else:
return Context(context)
@property
def rendered_content(self):
"""Returns the freshly rendered content for the template and context
described by the TemplateResponse.
This *does not* set the final content of the response. To set the
response content, you must either call render(), or set the
content explicitly using the value of this property.
"""
template = self.resolve_template(self.template_name)
context = self.resolve_context(self.context_data)
content = template.render(context)
return content
def render(self):
"""Render (thereby finalizing) the content of the response.
If the content has already been rendered, this is a no-op.
Returns the baked response instance.
"""
if not self._is_rendered:
self._set_content(self.rendered_content)
return self
is_rendered = property(lambda self: self._is_rendered)
def __iter__(self):
if not self._is_rendered:
raise ContentNotRenderedError('The response content must be rendered before it can be iterated over.')
return super(SimpleTemplateResponse, self).__iter__()
def _get_content(self):
if not self._is_rendered:
raise ContentNotRenderedError('The response content must be rendered before it can be accessed.')
return super(SimpleTemplateResponse, self)._get_content()
def _set_content(self, value):
"Overrides rendered content, unless you later call render()"
super(SimpleTemplateResponse, self)._set_content(value)
self._is_rendered = True
content = property(_get_content, _set_content)
class TemplateResponse(SimpleTemplateResponse):
def __init__(self, request, template, context=None, mimetype=None,
status=None, content_type=None):
# self.request gets over-written by django.test.client.Client - and
# unlike context_data and template_name the _request should not
# be considered part of the public API.
self._request = request
super(TemplateResponse, self).__init__(
template, context, mimetype, status, content_type)
def resolve_context(self, context):
"""Convert context data into a full RequestContext object
(assuming it isn't already a Context object).
"""
if isinstance(context, Context):
return context
else:
return RequestContext(self._request, context)
| apache-2.0 |
zace-yuan/viewfinder | marketing/tornado/test/process_test.py | 23 | 8746 | #!/usr/bin/env python
from __future__ import absolute_import, division, print_function, with_statement
import logging
import os
import signal
import subprocess
import sys
from tornado.httpclient import HTTPClient, HTTPError
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.log import gen_log
from tornado.process import fork_processes, task_id, Subprocess
from tornado.simple_httpclient import SimpleAsyncHTTPClient
from tornado.testing import bind_unused_port, ExpectLog, AsyncTestCase
from tornado.test.util import unittest, skipIfNonUnix
from tornado.web import RequestHandler, Application
def skip_if_twisted():
if IOLoop.configured_class().__name__.endswith('TwistedIOLoop'):
raise unittest.SkipTest("Process tests not compatible with TwistedIOLoop")
# Not using AsyncHTTPTestCase because we need control over the IOLoop.
@skipIfNonUnix
class ProcessTest(unittest.TestCase):
def get_app(self):
class ProcessHandler(RequestHandler):
def get(self):
if self.get_argument("exit", None):
# must use os._exit instead of sys.exit so unittest's
# exception handler doesn't catch it
os._exit(int(self.get_argument("exit")))
if self.get_argument("signal", None):
os.kill(os.getpid(),
int(self.get_argument("signal")))
self.write(str(os.getpid()))
return Application([("/", ProcessHandler)])
def tearDown(self):
if task_id() is not None:
# We're in a child process, and probably got to this point
# via an uncaught exception. If we return now, both
# processes will continue with the rest of the test suite.
# Exit now so the parent process will restart the child
# (since we don't have a clean way to signal failure to
# the parent that won't restart)
logging.error("aborting child process from tearDown")
logging.shutdown()
os._exit(1)
# In the surviving process, clear the alarm we set earlier
signal.alarm(0)
super(ProcessTest, self).tearDown()
def test_multi_process(self):
# This test can't work on twisted because we use the global reactor
# and have no way to get it back into a sane state after the fork.
skip_if_twisted()
with ExpectLog(gen_log, "(Starting .* processes|child .* exited|uncaught exception)"):
self.assertFalse(IOLoop.initialized())
sock, port = bind_unused_port()
def get_url(path):
return "http://127.0.0.1:%d%s" % (port, path)
# ensure that none of these processes live too long
signal.alarm(5) # master process
try:
id = fork_processes(3, max_restarts=3)
self.assertTrue(id is not None)
signal.alarm(5) # child processes
except SystemExit as e:
# if we exit cleanly from fork_processes, all the child processes
# finished with status 0
self.assertEqual(e.code, 0)
self.assertTrue(task_id() is None)
sock.close()
return
try:
if id in (0, 1):
self.assertEqual(id, task_id())
server = HTTPServer(self.get_app())
server.add_sockets([sock])
IOLoop.instance().start()
elif id == 2:
self.assertEqual(id, task_id())
sock.close()
# Always use SimpleAsyncHTTPClient here; the curl
# version appears to get confused sometimes if the
# connection gets closed before it's had a chance to
# switch from writing mode to reading mode.
client = HTTPClient(SimpleAsyncHTTPClient)
def fetch(url, fail_ok=False):
try:
return client.fetch(get_url(url))
except HTTPError as e:
if not (fail_ok and e.code == 599):
raise
# Make two processes exit abnormally
fetch("/?exit=2", fail_ok=True)
fetch("/?exit=3", fail_ok=True)
# They've been restarted, so a new fetch will work
int(fetch("/").body)
# Now the same with signals
# Disabled because on the mac a process dying with a signal
# can trigger an "Application exited abnormally; send error
# report to Apple?" prompt.
# fetch("/?signal=%d" % signal.SIGTERM, fail_ok=True)
# fetch("/?signal=%d" % signal.SIGABRT, fail_ok=True)
# int(fetch("/").body)
# Now kill them normally so they won't be restarted
fetch("/?exit=0", fail_ok=True)
                    # One process left; watch its pid change
pid = int(fetch("/").body)
fetch("/?exit=4", fail_ok=True)
pid2 = int(fetch("/").body)
self.assertNotEqual(pid, pid2)
# Kill the last one so we shut down cleanly
fetch("/?exit=0", fail_ok=True)
os._exit(0)
except Exception:
logging.error("exception in child process %d", id, exc_info=True)
raise
@skipIfNonUnix
class SubprocessTest(AsyncTestCase):
def test_subprocess(self):
subproc = Subprocess([sys.executable, '-u', '-i'],
stdin=Subprocess.STREAM,
stdout=Subprocess.STREAM, stderr=subprocess.STDOUT,
io_loop=self.io_loop)
self.addCleanup(lambda: os.kill(subproc.pid, signal.SIGTERM))
subproc.stdout.read_until(b'>>> ', self.stop)
self.wait()
subproc.stdin.write(b"print('hello')\n")
subproc.stdout.read_until(b'\n', self.stop)
data = self.wait()
self.assertEqual(data, b"hello\n")
subproc.stdout.read_until(b">>> ", self.stop)
self.wait()
subproc.stdin.write(b"raise SystemExit\n")
subproc.stdout.read_until_close(self.stop)
data = self.wait()
self.assertEqual(data, b"")
def test_close_stdin(self):
# Close the parent's stdin handle and see that the child recognizes it.
subproc = Subprocess([sys.executable, '-u', '-i'],
stdin=Subprocess.STREAM,
stdout=Subprocess.STREAM, stderr=subprocess.STDOUT,
io_loop=self.io_loop)
self.addCleanup(lambda: os.kill(subproc.pid, signal.SIGTERM))
subproc.stdout.read_until(b'>>> ', self.stop)
self.wait()
subproc.stdin.close()
subproc.stdout.read_until_close(self.stop)
data = self.wait()
self.assertEqual(data, b"\n")
def test_stderr(self):
subproc = Subprocess([sys.executable, '-u', '-c',
r"import sys; sys.stderr.write('hello\n')"],
stderr=Subprocess.STREAM,
io_loop=self.io_loop)
self.addCleanup(lambda: os.kill(subproc.pid, signal.SIGTERM))
subproc.stderr.read_until(b'\n', self.stop)
data = self.wait()
self.assertEqual(data, b'hello\n')
def test_sigchild(self):
# Twisted's SIGCHLD handler and Subprocess's conflict with each other.
skip_if_twisted()
Subprocess.initialize(io_loop=self.io_loop)
self.addCleanup(Subprocess.uninitialize)
subproc = Subprocess([sys.executable, '-c', 'pass'],
io_loop=self.io_loop)
subproc.set_exit_callback(self.stop)
ret = self.wait()
self.assertEqual(ret, 0)
self.assertEqual(subproc.returncode, ret)
def test_sigchild_signal(self):
skip_if_twisted()
Subprocess.initialize(io_loop=self.io_loop)
self.addCleanup(Subprocess.uninitialize)
subproc = Subprocess([sys.executable, '-c',
'import time; time.sleep(30)'],
io_loop=self.io_loop)
subproc.set_exit_callback(self.stop)
os.kill(subproc.pid, signal.SIGTERM)
ret = self.wait()
self.assertEqual(subproc.returncode, ret)
self.assertEqual(ret, -signal.SIGTERM)
| apache-2.0 |
idegtiarov/ceilometer | ceilometer/tests/unit/network/statistics/opendaylight/test_driver.py | 12 | 66291 | #
# Copyright 2013 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import mock
from oslotest import base
import six
from six import moves
from six.moves.urllib import parse as url_parse
from ceilometer.network.statistics.opendaylight import driver
@six.add_metaclass(abc.ABCMeta)
class _Base(base.BaseTestCase):
@abc.abstractproperty
def flow_data(self):
pass
@abc.abstractproperty
def port_data(self):
pass
@abc.abstractproperty
def table_data(self):
pass
@abc.abstractproperty
def topology_data(self):
pass
@abc.abstractproperty
def switch_data(self):
pass
@abc.abstractproperty
def user_links_data(self):
pass
@abc.abstractproperty
def active_hosts_data(self):
pass
@abc.abstractproperty
def inactive_hosts_data(self):
pass
fake_odl_url = url_parse.ParseResult('opendaylight',
'localhost:8080',
'controller/nb/v2',
None,
None,
None)
fake_params = url_parse.parse_qs('user=admin&password=admin&scheme=http&'
'container_name=default&auth=basic')
fake_params_multi_container = (
url_parse.parse_qs('user=admin&password=admin&scheme=http&'
'container_name=first&container_name=second&'
'auth=basic'))
def setUp(self):
super(_Base, self).setUp()
self.addCleanup(mock.patch.stopall)
self.driver = driver.OpenDayLightDriver()
self.get_flow_statistics = mock.patch(
'ceilometer.network.statistics.opendaylight.client.'
'StatisticsAPIClient.get_flow_statistics',
return_value=self.flow_data).start()
mock.patch('ceilometer.network.statistics.opendaylight.client.'
'StatisticsAPIClient.get_table_statistics',
return_value=self.table_data).start()
mock.patch('ceilometer.network.statistics.opendaylight.client.'
'StatisticsAPIClient.get_port_statistics',
return_value=self.port_data).start()
mock.patch('ceilometer.network.statistics.opendaylight.client.'
'TopologyAPIClient.get_topology',
return_value=self.topology_data).start()
mock.patch('ceilometer.network.statistics.opendaylight.client.'
'TopologyAPIClient.get_user_links',
return_value=self.user_links_data).start()
mock.patch('ceilometer.network.statistics.opendaylight.client.'
'SwitchManagerAPIClient.get_nodes',
return_value=self.switch_data).start()
mock.patch('ceilometer.network.statistics.opendaylight.client.'
'HostTrackerAPIClient.get_active_hosts',
return_value=self.active_hosts_data).start()
mock.patch('ceilometer.network.statistics.opendaylight.client.'
'HostTrackerAPIClient.get_inactive_hosts',
return_value=self.inactive_hosts_data).start()
def _test_for_meter(self, meter_name, expected_data):
sample_data = self.driver.get_sample_data(meter_name,
self.fake_odl_url,
self.fake_params,
{})
for sample, expected in moves.zip(sample_data, expected_data):
self.assertEqual(expected[0], sample[0]) # check volume
self.assertEqual(expected[1], sample[1]) # check resource id
self.assertEqual(expected[2], sample[2]) # check resource metadata
self.assertIsNotNone(sample[3]) # timestamp
class TestOpenDayLightDriverSpecial(_Base):
flow_data = {"flowStatistics": []}
port_data = {"portStatistics": []}
table_data = {"tableStatistics": []}
topology_data = {"edgeProperties": []}
switch_data = {"nodeProperties": []}
user_links_data = {"userLinks": []}
active_hosts_data = {"hostConfig": []}
inactive_hosts_data = {"hostConfig": []}
def test_not_implemented_meter(self):
sample_data = self.driver.get_sample_data('egg',
self.fake_odl_url,
self.fake_params,
{})
self.assertIsNone(sample_data)
sample_data = self.driver.get_sample_data('switch.table.egg',
self.fake_odl_url,
self.fake_params,
{})
self.assertIsNone(sample_data)
def test_cache(self):
cache = {}
self.driver.get_sample_data('switch',
self.fake_odl_url,
self.fake_params,
cache)
self.driver.get_sample_data('switch',
self.fake_odl_url,
self.fake_params,
cache)
self.assertEqual(1, self.get_flow_statistics.call_count)
cache = {}
self.driver.get_sample_data('switch',
self.fake_odl_url,
self.fake_params,
cache)
self.assertEqual(2, self.get_flow_statistics.call_count)
def test_multi_container(self):
cache = {}
self.driver.get_sample_data('switch',
self.fake_odl_url,
self.fake_params_multi_container,
cache)
self.assertEqual(2, self.get_flow_statistics.call_count)
self.assertIn('network.statistics.opendaylight', cache)
odl_data = cache['network.statistics.opendaylight']
self.assertIn('first', odl_data)
self.assertIn('second', odl_data)
def test_http_error(self):
mock.patch('ceilometer.network.statistics.opendaylight.client.'
'StatisticsAPIClient.get_flow_statistics',
side_effect=Exception()).start()
sample_data = self.driver.get_sample_data('switch',
self.fake_odl_url,
self.fake_params,
{})
self.assertEqual(0, len(sample_data))
mock.patch('ceilometer.network.statistics.opendaylight.client.'
'StatisticsAPIClient.get_flow_statistics',
side_effect=[Exception(), self.flow_data]).start()
cache = {}
self.driver.get_sample_data('switch',
self.fake_odl_url,
self.fake_params_multi_container,
cache)
self.assertIn('network.statistics.opendaylight', cache)
odl_data = cache['network.statistics.opendaylight']
self.assertIn('second', odl_data)
class TestOpenDayLightDriverSimple(_Base):
flow_data = {
"flowStatistics": [
{
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"flowStatistic": [
{
"flow": {
"match": {
"matchField": [
{
"type": "DL_TYPE",
"value": "2048"
},
{
"mask": "255.255.255.255",
"type": "NW_DST",
"value": "1.1.1.1"
}
]
},
"actions": {
"@type": "output",
"port": {
"id": "3",
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"type": "OF"
}
},
"hardTimeout": "0",
"id": "0",
"idleTimeout": "0",
"priority": "1"
},
"byteCount": "0",
"durationNanoseconds": "397000000",
"durationSeconds": "1828",
"packetCount": "0",
"tableId": "0"
},
]
}
]
}
port_data = {
"portStatistics": [
{
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"portStatistic": [
{
"nodeConnector": {
"id": "4",
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"type": "OF"
},
"collisionCount": "0",
"receiveBytes": "0",
"receiveCrcError": "0",
"receiveDrops": "0",
"receiveErrors": "0",
"receiveFrameError": "0",
"receiveOverRunError": "0",
"receivePackets": "0",
"transmitBytes": "0",
"transmitDrops": "0",
"transmitErrors": "0",
"transmitPackets": "0"
},
]
}
]
}
table_data = {
"tableStatistics": [
{
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"tableStatistic": [
{
"activeCount": "11",
"lookupCount": "816",
"matchedCount": "220",
"nodeTable": {
"id": "0",
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
}
}
},
]
}
]
}
topology_data = {"edgeProperties": []}
switch_data = {
"nodeProperties": [
{
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"properties": {
"actions": {
"value": "4095"
},
"timeStamp": {
"name": "connectedSince",
"value": "1377291227877"
}
}
},
]
}
user_links_data = {"userLinks": []}
active_hosts_data = {"hostConfig": []}
inactive_hosts_data = {"hostConfig": []}
def test_meter_switch(self):
expected_data = [
(1, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
"properties_actions": "4095",
"properties_timeStamp_connectedSince": "1377291227877"
}),
]
self._test_for_meter('switch', expected_data)
def test_meter_switch_port(self):
expected_data = [
(1, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4',
}),
]
self._test_for_meter('switch.port', expected_data)
def test_meter_switch_port_receive_packets(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
]
self._test_for_meter('switch.port.receive.packets', expected_data)
def test_meter_switch_port_transmit_packets(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
]
self._test_for_meter('switch.port.transmit.packets', expected_data)
def test_meter_switch_port_receive_bytes(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
]
self._test_for_meter('switch.port.receive.bytes', expected_data)
def test_meter_switch_port_transmit_bytes(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
]
self._test_for_meter('switch.port.transmit.bytes', expected_data)
def test_meter_switch_port_receive_drops(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
]
self._test_for_meter('switch.port.receive.drops', expected_data)
def test_meter_switch_port_transmit_drops(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
]
self._test_for_meter('switch.port.transmit.drops', expected_data)
def test_meter_switch_port_receive_errors(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
]
self._test_for_meter('switch.port.receive.errors', expected_data)
def test_meter_switch_port_transmit_errors(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
]
self._test_for_meter('switch.port.transmit.errors', expected_data)
def test_meter_switch_port_receive_frame_error(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
]
self._test_for_meter('switch.port.receive.frame_error', expected_data)
def test_meter_switch_port_receive_overrun_error(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
]
self._test_for_meter('switch.port.receive.overrun_error',
expected_data)
def test_meter_switch_port_receive_crc_error(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
]
self._test_for_meter('switch.port.receive.crc_error', expected_data)
def test_meter_switch_port_collision_count(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
]
self._test_for_meter('switch.port.collision.count', expected_data)
def test_meter_switch_table(self):
expected_data = [
(1, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0'}),
]
self._test_for_meter('switch.table', expected_data)
def test_meter_switch_table_active_entries(self):
expected_data = [
(11, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0'}),
]
self._test_for_meter('switch.table.active.entries', expected_data)
def test_meter_switch_table_lookup_packets(self):
expected_data = [
(816, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0'}),
]
self._test_for_meter('switch.table.lookup.packets', expected_data)
def test_meter_switch_table_matched_packets(self):
expected_data = [
(220, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0'}),
]
self._test_for_meter('switch.table.matched.packets', expected_data)
def test_meter_switch_flow(self):
expected_data = [
(1, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.1",
"flow_actions_@type": "output",
"flow_actions_port_id": "3",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:02",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"
}),
]
self._test_for_meter('switch.flow', expected_data)
def test_meter_switch_flow_duration_seconds(self):
expected_data = [
(1828, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.1",
"flow_actions_@type": "output",
"flow_actions_port_id": "3",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:02",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"}),
]
self._test_for_meter('switch.flow.duration_seconds', expected_data)
def test_meter_switch_flow_duration_nanoseconds(self):
expected_data = [
(397000000, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.1",
"flow_actions_@type": "output",
"flow_actions_port_id": "3",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:02",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"}),
]
self._test_for_meter('switch.flow.duration_nanoseconds', expected_data)
def test_meter_switch_flow_packets(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.1",
"flow_actions_@type": "output",
"flow_actions_port_id": "3",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:02",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"}),
]
self._test_for_meter('switch.flow.packets', expected_data)
def test_meter_switch_flow_bytes(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.1",
"flow_actions_@type": "output",
"flow_actions_port_id": "3",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:02",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"}),
]
self._test_for_meter('switch.flow.bytes', expected_data)
class TestOpenDayLightDriverComplex(_Base):
flow_data = {
"flowStatistics": [
{
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"flowStatistic": [
{
"flow": {
"match": {
"matchField": [
{
"type": "DL_TYPE",
"value": "2048"
},
{
"mask": "255.255.255.255",
"type": "NW_DST",
"value": "1.1.1.1"
}
]
},
"actions": {
"@type": "output",
"port": {
"id": "3",
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"type": "OF"
}
},
"hardTimeout": "0",
"id": "0",
"idleTimeout": "0",
"priority": "1"
},
"byteCount": "0",
"durationNanoseconds": "397000000",
"durationSeconds": "1828",
"packetCount": "0",
"tableId": "0"
},
{
"flow": {
"match": {
"matchField": [
{
"type": "DL_TYPE",
"value": "2048"
},
{
"mask": "255.255.255.255",
"type": "NW_DST",
"value": "1.1.1.2"
}
]
},
"actions": {
"@type": "output",
"port": {
"id": "4",
"node": {
"id": "00:00:00:00:00:00:00:03",
"type": "OF"
},
"type": "OF"
}
},
"hardTimeout": "0",
"id": "0",
"idleTimeout": "0",
"priority": "1"
},
"byteCount": "89",
"durationNanoseconds": "200000",
"durationSeconds": "5648",
"packetCount": "30",
"tableId": "1"
}
]
}
]
}
port_data = {
"portStatistics": [
{
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"portStatistic": [
{
"nodeConnector": {
"id": "4",
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"type": "OF"
},
"collisionCount": "0",
"receiveBytes": "0",
"receiveCrcError": "0",
"receiveDrops": "0",
"receiveErrors": "0",
"receiveFrameError": "0",
"receiveOverRunError": "0",
"receivePackets": "0",
"transmitBytes": "0",
"transmitDrops": "0",
"transmitErrors": "0",
"transmitPackets": "0"
},
{
"nodeConnector": {
"id": "3",
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"type": "OF"
},
"collisionCount": "0",
"receiveBytes": "12740",
"receiveCrcError": "0",
"receiveDrops": "0",
"receiveErrors": "0",
"receiveFrameError": "0",
"receiveOverRunError": "0",
"receivePackets": "182",
"transmitBytes": "12110",
"transmitDrops": "0",
"transmitErrors": "0",
"transmitPackets": "173"
},
{
"nodeConnector": {
"id": "2",
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"type": "OF"
},
"collisionCount": "0",
"receiveBytes": "12180",
"receiveCrcError": "0",
"receiveDrops": "0",
"receiveErrors": "0",
"receiveFrameError": "0",
"receiveOverRunError": "0",
"receivePackets": "174",
"transmitBytes": "12670",
"transmitDrops": "0",
"transmitErrors": "0",
"transmitPackets": "181"
},
{
"nodeConnector": {
"id": "1",
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"type": "OF"
},
"collisionCount": "0",
"receiveBytes": "0",
"receiveCrcError": "0",
"receiveDrops": "0",
"receiveErrors": "0",
"receiveFrameError": "0",
"receiveOverRunError": "0",
"receivePackets": "0",
"transmitBytes": "0",
"transmitDrops": "0",
"transmitErrors": "0",
"transmitPackets": "0"
},
{
"nodeConnector": {
"id": "0",
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"type": "OF"
},
"collisionCount": "0",
"receiveBytes": "0",
"receiveCrcError": "0",
"receiveDrops": "0",
"receiveErrors": "0",
"receiveFrameError": "0",
"receiveOverRunError": "0",
"receivePackets": "0",
"transmitBytes": "0",
"transmitDrops": "0",
"transmitErrors": "0",
"transmitPackets": "0"
}
]
}
]
}
table_data = {
"tableStatistics": [
{
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"tableStatistic": [
{
"activeCount": "11",
"lookupCount": "816",
"matchedCount": "220",
"nodeTable": {
"id": "0",
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
}
}
},
{
"activeCount": "20",
"lookupCount": "10",
"matchedCount": "5",
"nodeTable": {
"id": "1",
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
}
}
}
]
}
]
}
topology_data = {
"edgeProperties": [
{
"edge": {
"headNodeConnector": {
"id": "2",
"node": {
"id": "00:00:00:00:00:00:00:03",
"type": "OF"
},
"type": "OF"
},
"tailNodeConnector": {
"id": "2",
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"type": "OF"
}
},
"properties": {
"bandwidth": {
"value": 10000000000
},
"config": {
"value": 1
},
"name": {
"value": "s2-eth3"
},
"state": {
"value": 1
},
"timeStamp": {
"name": "creation",
"value": 1379527162648
}
}
},
{
"edge": {
"headNodeConnector": {
"id": "5",
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"type": "OF"
},
"tailNodeConnector": {
"id": "2",
"node": {
"id": "00:00:00:00:00:00:00:04",
"type": "OF"
},
"type": "OF"
}
},
"properties": {
"timeStamp": {
"name": "creation",
"value": 1379527162648
}
}
}
]
}
switch_data = {
"nodeProperties": [
{
"node": {
"id": "00:00:00:00:00:00:00:02",
"type": "OF"
},
"properties": {
"actions": {
"value": "4095"
},
"buffers": {
"value": "256"
},
"capabilities": {
"value": "199"
},
"description": {
"value": "None"
},
"macAddress": {
"value": "00:00:00:00:00:02"
},
"tables": {
"value": "-1"
},
"timeStamp": {
"name": "connectedSince",
"value": "1377291227877"
}
}
},
{
"node": {
"id": "00:00:00:00:00:00:00:03",
"type": "OF"
},
"properties": {
"actions": {
"value": "1024"
},
"buffers": {
"value": "512"
},
"capabilities": {
"value": "1000"
},
"description": {
"value": "Foo Bar"
},
"macAddress": {
"value": "00:00:00:00:00:03"
},
"tables": {
"value": "10"
},
"timeStamp": {
"name": "connectedSince",
"value": "1377291228000"
}
}
}
]
}
user_links_data = {
"userLinks": [
{
"dstNodeConnector": "OF|5@OF|00:00:00:00:00:00:00:05",
"name": "link1",
"srcNodeConnector": "OF|3@OF|00:00:00:00:00:00:00:02",
"status": "Success"
}
]
}
active_hosts_data = {
"hostConfig": [
{
"dataLayerAddress": "00:00:00:00:01:01",
"networkAddress": "1.1.1.1",
"nodeConnectorId": "9",
"nodeConnectorType": "OF",
"nodeId": "00:00:00:00:00:00:00:01",
"nodeType": "OF",
"staticHost": "false",
"vlan": "0"
},
{
"dataLayerAddress": "00:00:00:00:02:02",
"networkAddress": "2.2.2.2",
"nodeConnectorId": "1",
"nodeConnectorType": "OF",
"nodeId": "00:00:00:00:00:00:00:02",
"nodeType": "OF",
"staticHost": "true",
"vlan": "0"
}
]
}
inactive_hosts_data = {
"hostConfig": [
{
"dataLayerAddress": "00:00:00:01:01:01",
"networkAddress": "1.1.1.3",
"nodeConnectorId": "8",
"nodeConnectorType": "OF",
"nodeId": "00:00:00:00:00:00:00:01",
"nodeType": "OF",
"staticHost": "false",
"vlan": "0"
},
{
"dataLayerAddress": "00:00:00:01:02:02",
"networkAddress": "2.2.2.4",
"nodeConnectorId": "0",
"nodeConnectorType": "OF",
"nodeId": "00:00:00:00:00:00:00:02",
"nodeType": "OF",
"staticHost": "false",
"vlan": "1"
}
]
}
def test_meter_switch(self):
expected_data = [
(1, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
"properties_actions": "4095",
"properties_buffers": "256",
"properties_capabilities": "199",
"properties_description": "None",
"properties_macAddress": "00:00:00:00:00:02",
"properties_tables": "-1",
"properties_timeStamp_connectedSince": "1377291227877"
}),
(1, "00:00:00:00:00:00:00:03", {
'controller': 'OpenDaylight',
'container': 'default',
"properties_actions": "1024",
"properties_buffers": "512",
"properties_capabilities": "1000",
"properties_description": "Foo Bar",
"properties_macAddress": "00:00:00:00:00:03",
"properties_tables": "10",
"properties_timeStamp_connectedSince": "1377291228000"
}),
]
self._test_for_meter('switch', expected_data)
def test_meter_switch_port(self):
expected_data = [
(1, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4',
}),
(1, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '3',
'user_link_node_id': '00:00:00:00:00:00:00:05',
'user_link_node_port': '5',
'user_link_status': 'Success',
'user_link_name': 'link1',
}),
(1, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '2',
'topology_node_id': '00:00:00:00:00:00:00:03',
'topology_node_port': '2',
"topology_bandwidth": 10000000000,
"topology_config": 1,
"topology_name": "s2-eth3",
"topology_state": 1,
"topology_timeStamp_creation": 1379527162648
}),
(1, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '1',
'host_status': 'active',
'host_dataLayerAddress': '00:00:00:00:02:02',
'host_networkAddress': '2.2.2.2',
'host_staticHost': 'true',
'host_vlan': '0',
}),
(1, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '0',
'host_status': 'inactive',
'host_dataLayerAddress': '00:00:00:01:02:02',
'host_networkAddress': '2.2.2.4',
'host_staticHost': 'false',
'host_vlan': '1',
}),
]
self._test_for_meter('switch.port', expected_data)
def test_meter_switch_port_receive_packets(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
(182, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '3'}),
(174, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '2'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '1'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '0'}),
]
self._test_for_meter('switch.port.receive.packets', expected_data)
def test_meter_switch_port_transmit_packets(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
(173, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '3'}),
(181, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '2'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '1'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '0'}),
]
self._test_for_meter('switch.port.transmit.packets', expected_data)
def test_meter_switch_port_receive_bytes(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
(12740, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '3'}),
(12180, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '2'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '1'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '0'}),
]
self._test_for_meter('switch.port.receive.bytes', expected_data)
def test_meter_switch_port_transmit_bytes(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
(12110, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '3'}),
(12670, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '2'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '1'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '0'}),
]
self._test_for_meter('switch.port.transmit.bytes', expected_data)
def test_meter_switch_port_receive_drops(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '3'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '2'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '1'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '0'}),
]
self._test_for_meter('switch.port.receive.drops', expected_data)
def test_meter_switch_port_transmit_drops(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '3'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '2'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '1'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '0'}),
]
self._test_for_meter('switch.port.transmit.drops', expected_data)
def test_meter_switch_port_receive_errors(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '3'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '2'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '1'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '0'}),
]
self._test_for_meter('switch.port.receive.errors', expected_data)
def test_meter_switch_port_transmit_errors(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '3'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '2'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '1'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '0'}),
]
self._test_for_meter('switch.port.transmit.errors', expected_data)
def test_meter_switch_port_receive_frame_error(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '3'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '2'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '1'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '0'}),
]
self._test_for_meter('switch.port.receive.frame_error', expected_data)
def test_meter_switch_port_receive_overrun_error(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '3'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '2'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '1'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '0'}),
]
self._test_for_meter('switch.port.receive.overrun_error',
expected_data)
def test_meter_switch_port_receive_crc_error(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '3'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '2'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '1'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '0'}),
]
self._test_for_meter('switch.port.receive.crc_error', expected_data)
def test_meter_switch_port_collision_count(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '4'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '3'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '2'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '1'}),
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'port': '0'}),
]
self._test_for_meter('switch.port.collision.count', expected_data)
def test_meter_switch_table(self):
expected_data = [
(1, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0'}),
(1, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '1'}),
]
self._test_for_meter('switch.table', expected_data)
def test_meter_switch_table_active_entries(self):
expected_data = [
(11, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0'}),
(20, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '1'}),
]
self._test_for_meter('switch.table.active.entries', expected_data)
def test_meter_switch_table_lookup_packets(self):
expected_data = [
(816, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0'}),
(10, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '1'}),
]
self._test_for_meter('switch.table.lookup.packets', expected_data)
def test_meter_switch_table_matched_packets(self):
expected_data = [
(220, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0'}),
(5, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '1'}),
]
self._test_for_meter('switch.table.matched.packets', expected_data)
def test_meter_switch_flow(self):
expected_data = [
(1, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.1",
"flow_actions_@type": "output",
"flow_actions_port_id": "3",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:02",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"
}),
(1, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '1',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.2",
"flow_actions_@type": "output",
"flow_actions_port_id": "4",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:03",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"
}),
]
self._test_for_meter('switch.flow', expected_data)
def test_meter_switch_flow_duration_seconds(self):
expected_data = [
(1828, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.1",
"flow_actions_@type": "output",
"flow_actions_port_id": "3",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:02",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"}),
(5648, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '1',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.2",
"flow_actions_@type": "output",
"flow_actions_port_id": "4",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:03",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"}),
]
self._test_for_meter('switch.flow.duration_seconds', expected_data)
def test_meter_switch_flow_duration_nanoseconds(self):
expected_data = [
(397000000, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.1",
"flow_actions_@type": "output",
"flow_actions_port_id": "3",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:02",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"}),
(200000, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '1',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.2",
"flow_actions_@type": "output",
"flow_actions_port_id": "4",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:03",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"}),
]
self._test_for_meter('switch.flow.duration_nanoseconds', expected_data)
def test_meter_switch_flow_packets(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.1",
"flow_actions_@type": "output",
"flow_actions_port_id": "3",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:02",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"}),
(30, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '1',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.2",
"flow_actions_@type": "output",
"flow_actions_port_id": "4",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:03",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"}),
]
self._test_for_meter('switch.flow.packets', expected_data)
def test_meter_switch_flow_bytes(self):
expected_data = [
(0, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '0',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.1",
"flow_actions_@type": "output",
"flow_actions_port_id": "3",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:02",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"}),
(89, "00:00:00:00:00:00:00:02", {
'controller': 'OpenDaylight',
'container': 'default',
'table_id': '1',
'flow_id': '0',
"flow_match_matchField[0]_type": "DL_TYPE",
"flow_match_matchField[0]_value": "2048",
"flow_match_matchField[1]_mask": "255.255.255.255",
"flow_match_matchField[1]_type": "NW_DST",
"flow_match_matchField[1]_value": "1.1.1.2",
"flow_actions_@type": "output",
"flow_actions_port_id": "4",
"flow_actions_port_node_id": "00:00:00:00:00:00:00:03",
"flow_actions_port_node_type": "OF",
"flow_actions_port_type": "OF",
"flow_hardTimeout": "0",
"flow_idleTimeout": "0",
"flow_priority": "1"}),
]
self._test_for_meter('switch.flow.bytes', expected_data)
| apache-2.0 |
jkkm/binutils-gdb | gdb/testsuite/gdb.perf/single-step.py | 41 | 1269 | # Copyright (C) 2013-2014 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from perftest import perftest
class SingleStep (perftest.TestCaseWithBasicMeasurements):
def __init__(self, step):
super (SingleStep, self).__init__ ("single-step")
self.step = step
def warm_up(self):
for _ in range(0, self.step):
gdb.execute("stepi", False, True)
def _run(self, r):
for _ in range(0, r):
gdb.execute("stepi", False, True)
def execute_test(self):
for i in range(1, 5):
func = lambda: self._run(i * self.step)
self.measure.measure(func, i * self.step)
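# Illustrative invocation sketch (not part of the original test). The matching
# .exp harness is assumed to drive this class roughly as follows, where run()
# is assumed to be provided by perftest.TestCaseWithBasicMeasurements:
#
#   SingleStep(1000).run()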
| gpl-2.0 |
cricketclubucd/davisdragons | platform-tools/systrace/catapult/telemetry/telemetry/internal/results/results_options.py | 5 | 7947 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import codecs
import optparse
import os
import sys
import time
from py_utils import cloud_storage # pylint: disable=import-error
from telemetry.core import util
from telemetry.internal.results import chart_json_output_formatter
from telemetry.internal.results import csv_pivot_table_output_formatter
from telemetry.internal.results import gtest_progress_reporter
from telemetry.internal.results import histogram_set_json_output_formatter
from telemetry.internal.results import html_output_formatter
from telemetry.internal.results import json_output_formatter
from telemetry.internal.results import legacy_html_output_formatter
from telemetry.internal.results import page_test_results
from telemetry.internal.results import progress_reporter
# Allowed output formats. The default is the first item in the list.
_OUTPUT_FORMAT_CHOICES = ('html', 'gtest', 'json', 'chartjson',
'csv-pivot-table', 'histograms', 'legacy-html', 'none')
# Filenames to use for given output formats.
_OUTPUT_FILENAME_LOOKUP = {
'html': 'results.html',
'json': 'results.json',
'chartjson': 'results-chart.json',
'csv-pivot-table': 'results-pivot-table.csv',
'histograms': 'histograms.json',
'legacy-html': 'legacy-results.html'
}
def AddResultsOptions(parser):
group = optparse.OptionGroup(parser, 'Results options')
group.add_option('--output-format', action='append', dest='output_formats',
choices=_OUTPUT_FORMAT_CHOICES, default=[],
help='Output format. Defaults to "%%default". '
'Can be %s.' % ', '.join(_OUTPUT_FORMAT_CHOICES))
group.add_option('-o', '--output',
dest='output_file',
default=None,
help='Redirects output to a file. Defaults to stdout.')
group.add_option('--output-dir', default=util.GetBaseDir(),
help='Where to save output data after the run.')
group.add_option('--output-trace-tag',
default='',
help='Append a tag to the key of each result trace. Use '
'with html, csv-pivot-table output formats.')
group.add_option('--reset-results', action='store_true',
help='Delete all stored results.')
group.add_option('--upload-results', action='store_true',
help='Upload the results to cloud storage.')
group.add_option('--upload-bucket', default='output',
help='Storage bucket to use for the uploaded results. ' +
'Defaults to output bucket. Supported values are: ' +
', '.join(cloud_storage.BUCKET_ALIAS_NAMES) +
'; or a valid cloud storage bucket name.')
group.add_option('--results-label',
default=None,
                   help='Optional label to use for the results of a run.')
group.add_option('--suppress_gtest_report',
default=False,
help='Whether to suppress GTest progress report.')
parser.add_option_group(group)
def ProcessCommandLineArgs(parser, args):
  # TODO(ariblue): Delete this flag entirely at some future date, when the
# existence of such a flag has been long forgotten.
if args.output_file:
parser.error('This flag is deprecated. Please use --output-dir instead.')
try:
os.makedirs(args.output_dir)
except OSError:
# Do nothing if the output directory already exists. Existing files will
# get overwritten.
pass
args.output_dir = os.path.expanduser(args.output_dir)
def _GetOutputStream(output_format, output_dir):
assert output_format in _OUTPUT_FORMAT_CHOICES, 'Must specify a valid format.'
assert output_format not in ('gtest', 'none'), (
'Cannot set stream for \'gtest\' or \'none\' output formats.')
assert output_format in _OUTPUT_FILENAME_LOOKUP, (
'No known filename for the \'%s\' output format' % output_format)
output_file = os.path.join(output_dir, _OUTPUT_FILENAME_LOOKUP[output_format])
# TODO(eakuefner): Factor this hack out after we rewrite HTMLOutputFormatter.
if output_format == 'html' or output_format == 'legacy-html':
open(output_file, 'a').close() # Create file if it doesn't exist.
return codecs.open(output_file, mode='r+', encoding='utf-8')
else:
return open(output_file, mode='w+')
def _GetProgressReporter(output_skipped_tests_summary, suppress_gtest_report):
if suppress_gtest_report:
return progress_reporter.ProgressReporter()
return gtest_progress_reporter.GTestProgressReporter(
sys.stdout, output_skipped_tests_summary=output_skipped_tests_summary)
def CreateResults(benchmark_metadata, options,
value_can_be_added_predicate=lambda v, is_first: True,
benchmark_enabled=True):
"""
Args:
options: Contains the options specified in AddResultsOptions.
"""
if not options.output_formats:
options.output_formats = [_OUTPUT_FORMAT_CHOICES[0]]
upload_bucket = None
if options.upload_results:
upload_bucket = options.upload_bucket
if upload_bucket in cloud_storage.BUCKET_ALIASES:
upload_bucket = cloud_storage.BUCKET_ALIASES[upload_bucket]
output_formatters = []
for output_format in options.output_formats:
if output_format == 'none' or output_format == "gtest":
continue
output_stream = _GetOutputStream(output_format, options.output_dir)
if output_format == 'csv-pivot-table':
output_formatters.append(
csv_pivot_table_output_formatter.CsvPivotTableOutputFormatter(
output_stream, trace_tag=options.output_trace_tag))
elif output_format == 'html':
output_formatters.append(html_output_formatter.HtmlOutputFormatter(
output_stream, benchmark_metadata, options.reset_results,
upload_bucket))
elif output_format == 'json':
output_formatters.append(json_output_formatter.JsonOutputFormatter(
output_stream, benchmark_metadata))
elif output_format == 'chartjson':
output_formatters.append(
chart_json_output_formatter.ChartJsonOutputFormatter(
output_stream, benchmark_metadata))
elif output_format == 'histograms':
output_formatters.append(
histogram_set_json_output_formatter.HistogramSetJsonOutputFormatter(
output_stream, benchmark_metadata, options.reset_results))
elif output_format == 'legacy-html':
output_formatters.append(
legacy_html_output_formatter.LegacyHtmlOutputFormatter(
output_stream, benchmark_metadata, options.reset_results,
options.browser_type, options.results_label))
else:
# Should never be reached. The parser enforces the choices.
raise Exception('Invalid --output-format "%s". Valid choices are: %s'
% (output_format, ', '.join(_OUTPUT_FORMAT_CHOICES)))
# TODO(chrishenry): This is here to not change the output of
# gtest. Let's try enabling skipped tests summary for gtest test
# results too (in a separate patch), and see if we break anything.
output_skipped_tests_summary = 'gtest' in options.output_formats
reporter = _GetProgressReporter(output_skipped_tests_summary,
options.suppress_gtest_report)
results = page_test_results.PageTestResults(
output_formatters=output_formatters, progress_reporter=reporter,
output_dir=options.output_dir,
value_can_be_added_predicate=value_can_be_added_predicate,
benchmark_enabled=benchmark_enabled)
results.telemetry_info.benchmark_name = benchmark_metadata.name
results.telemetry_info.benchmark_start_ms = time.time() * 1000.0
if options.results_label:
results.telemetry_info.label = options.results_label
return results
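# Illustrative usage sketch (not part of the original module). It assumes a
# `benchmark_metadata` object exposing at least a `name` attribute; the wiring
# below only uses the helpers defined in this module.
#
#   parser = optparse.OptionParser()
#   AddResultsOptions(parser)
#   options, _ = parser.parse_args(['--output-format', 'json'])
#   ProcessCommandLineArgs(parser, options)
#   results = CreateResults(benchmark_metadata, options)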
| mit |
yakky/django-cms | cms/models/fields.py | 3 | 3939 | # -*- coding: utf-8 -*-
from cms.forms.fields import PageSelectFormField
from cms.models.placeholdermodel import Placeholder
from django.db import models
class PlaceholderField(models.ForeignKey):
def __init__(self, slotname, default_width=None, actions=None, **kwargs):
from cms.utils.placeholder import PlaceholderNoAction, validate_placeholder_name
if not actions:
actions = PlaceholderNoAction
if kwargs.get('related_name', None) == '+':
raise ValueError("PlaceholderField does not support disabling of related names via '+'.")
if not callable(slotname):
validate_placeholder_name(slotname)
self.slotname = slotname
self.default_width = default_width
self.actions = actions()
kwargs.update({'null': True}) # always allow Null
kwargs.update({'editable': False}) # never allow edits in admin
# We hard-code the `to` argument for ForeignKey.__init__
# since a PlaceholderField can only be a ForeignKey to a Placeholder
kwargs['to'] = 'cms.Placeholder'
kwargs['on_delete'] = kwargs.get('on_delete', models.CASCADE)
super(PlaceholderField, self).__init__(**kwargs)
def deconstruct(self):
name, path, args, kwargs = super(PlaceholderField, self).deconstruct()
kwargs['slotname'] = self.slotname
return name, path, args, kwargs
def _get_new_placeholder(self, instance):
return Placeholder.objects.create(slot=self._get_placeholder_slot(instance), default_width=self.default_width)
def _get_placeholder_slot(self, model_instance):
from cms.utils.placeholder import validate_placeholder_name
if callable(self.slotname):
slotname = self.slotname(model_instance)
validate_placeholder_name(slotname)
else:
slotname = self.slotname
return slotname
def pre_save(self, model_instance, add):
if not model_instance.pk:
setattr(model_instance, self.name, self._get_new_placeholder(model_instance))
else:
slot = self._get_placeholder_slot(model_instance)
placeholder = getattr(model_instance, self.name)
if not placeholder:
setattr(model_instance, self.name, self._get_new_placeholder(model_instance))
placeholder = getattr(model_instance, self.name)
if placeholder.slot != slot:
placeholder.slot = slot
placeholder.save()
return super(PlaceholderField, self).pre_save(model_instance, add)
def save_form_data(self, instance, data):
data = getattr(instance, self.name, '')
if not isinstance(data, Placeholder):
data = self._get_new_placeholder(instance)
super(PlaceholderField, self).save_form_data(instance, data)
def contribute_to_class(self, cls, name):
super(PlaceholderField, self).contribute_to_class(cls, name)
if not hasattr(cls._meta, 'placeholder_field_names'):
cls._meta.placeholder_field_names = []
if not hasattr(cls._meta, 'placeholder_fields'):
cls._meta.placeholder_fields = {}
cls._meta.placeholder_field_names.append(name)
cls._meta.placeholder_fields[self] = name
self.model = cls
class PageField(models.ForeignKey):
default_form_class = PageSelectFormField
def __init__(self, **kwargs):
# We hard-code the `to` argument for ForeignKey.__init__
# since a PageField can only be a ForeignKey to a Page
kwargs['to'] = 'cms.Page'
kwargs['on_delete'] = kwargs.get('on_delete', models.CASCADE)
super(PageField, self).__init__(**kwargs)
def formfield(self, **kwargs):
defaults = {
'form_class': self.default_form_class,
}
defaults.update(kwargs)
return super(PageField, self).formfield(**defaults)
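# Illustrative usage sketch (not part of the original module): a hypothetical
# model combining both fields. `slotname` may also be a callable that receives
# the model instance, as handled by _get_placeholder_slot() above.
#
#   class NewsEntry(models.Model):
#       title = models.CharField(max_length=255)
#       body = PlaceholderField('news_body')
#       related_page = PageField(blank=True, null=True)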
| bsd-3-clause |
rds0751/colinkers | env/Lib/site-packages/pip/_vendor/html5lib/filters/lint.py | 328 | 3365 | from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type
from . import base
from ..constants import namespaces, voidElements
from ..constants import spaceCharacters
spaceCharacters = "".join(spaceCharacters)
class Filter(base.Filter):
def __init__(self, source, require_matching_tags=True):
super(Filter, self).__init__(source)
self.require_matching_tags = require_matching_tags
def __iter__(self):
open_elements = []
for token in base.Filter.__iter__(self):
type = token["type"]
if type in ("StartTag", "EmptyTag"):
namespace = token["namespace"]
name = token["name"]
assert namespace is None or isinstance(namespace, text_type)
assert namespace != ""
assert isinstance(name, text_type)
assert name != ""
assert isinstance(token["data"], dict)
if (not namespace or namespace == namespaces["html"]) and name in voidElements:
assert type == "EmptyTag"
else:
assert type == "StartTag"
if type == "StartTag" and self.require_matching_tags:
open_elements.append((namespace, name))
for (namespace, name), value in token["data"].items():
assert namespace is None or isinstance(namespace, text_type)
assert namespace != ""
assert isinstance(name, text_type)
assert name != ""
assert isinstance(value, text_type)
elif type == "EndTag":
namespace = token["namespace"]
name = token["name"]
assert namespace is None or isinstance(namespace, text_type)
assert namespace != ""
assert isinstance(name, text_type)
assert name != ""
if (not namespace or namespace == namespaces["html"]) and name in voidElements:
assert False, "Void element reported as EndTag token: %(tag)s" % {"tag": name}
elif self.require_matching_tags:
start = open_elements.pop()
assert start == (namespace, name)
elif type == "Comment":
data = token["data"]
assert isinstance(data, text_type)
elif type in ("Characters", "SpaceCharacters"):
data = token["data"]
assert isinstance(data, text_type)
assert data != ""
if type == "SpaceCharacters":
assert data.strip(spaceCharacters) == ""
elif type == "Doctype":
name = token["name"]
assert name is None or isinstance(name, text_type)
assert token["publicId"] is None or isinstance(name, text_type)
assert token["systemId"] is None or isinstance(name, text_type)
elif type == "Entity":
assert isinstance(token["name"], text_type)
elif type == "SerializerError":
assert isinstance(token["data"], text_type)
else:
assert False, "Unknown token type: %(type)s" % {"type": type}
yield token
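# Illustrative usage sketch (not part of the original module), assuming the
# usual html5lib pipeline: parse a document, walk it, and wrap the resulting
# token stream so malformed tokens trigger assertions while valid ones pass
# through unchanged.
#
#   import html5lib
#   doc = html5lib.parse("<p>hello</p>")
#   walker = html5lib.getTreeWalker("etree")
#   for token in Filter(walker(doc)):
#       pass  # tokens are re-yielded unchanged once validated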
| agpl-3.0 |
torbjoernk/easybuild-framework | easybuild/toolchains/goolfc.py | 5 | 1656 | ##
# Copyright 2013-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for goolfc compiler toolchain (includes GCC+CUDA, OpenMPI, OpenBLAS, LAPACK, ScaLAPACK and FFTW).
@author: Kenneth Hoste (Ghent University)
"""
from easybuild.toolchains.gcccuda import GccCUDA
from easybuild.toolchains.fft.fftw import Fftw
from easybuild.toolchains.linalg.openblas import OpenBLAS
from easybuild.toolchains.linalg.scalapack import ScaLAPACK
from easybuild.toolchains.mpi.openmpi import OpenMPI
class Goolfc(GccCUDA, OpenMPI, OpenBLAS, ScaLAPACK, Fftw):
"""Compiler toolchain with GCC+CUDA, OpenMPI, OpenBLAS, ScaLAPACK and FFTW."""
NAME = 'goolfc'
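# Illustrative usage sketch (not part of this module): an easyconfig selects
# this toolchain by name; the version shown here is hypothetical.
#
#   toolchain = {'name': 'goolfc', 'version': '2015a'}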
| gpl-2.0 |
hovo1990/deviser | generator/tests/run-tests.py | 1 | 1281 | #!/usr/bin/env python
import os
import sys
import test_functions
##############################################################################
# Set up variables
fails = []
total_fail = 0
this_dir = ''
#############################################################################
# Specific test functions
def run_test(name, test_type):
global total_fail
test_name = 'test_{0}_{1}'.format(name, test_type)
test_case = os.path.join(this_dir, test_name)
os.chdir(test_case)
total_fail += test_functions.run_tests(test_name, name, fails)
os.chdir(this_dir)
#########################################################################
# Main function
def main():
test_functions.set_running_tests()
global this_dir
this_dir = os.path.dirname(os.path.abspath(__file__))
global total_fail
run_test('binding', 'code')
run_test('cmake', 'code')
run_test('cpp', 'code')
run_test('exit', 'codes')
run_test('tex', 'files')
print('')
if total_fail > 0:
print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
print('Check {0} fails'.format(total_fail))
for name in fails:
print(name)
else:
print('EVERYTHING PASSED')
if __name__ == '__main__':
main()
| lgpl-2.1 |
jaharkes/home-assistant | homeassistant/components/influxdb.py | 5 | 4307 | """
A component which allows you to send data to an Influx database.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/influxdb/
"""
import logging
import voluptuous as vol
from homeassistant.const import (
EVENT_STATE_CHANGED, STATE_UNAVAILABLE, STATE_UNKNOWN, CONF_HOST,
CONF_PORT, CONF_SSL, CONF_VERIFY_SSL, CONF_USERNAME, CONF_BLACKLIST,
CONF_PASSWORD, CONF_WHITELIST)
from homeassistant.helpers import state as state_helper
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['influxdb==3.0.0']
_LOGGER = logging.getLogger(__name__)
CONF_DB_NAME = 'database'
CONF_TAGS = 'tags'
DEFAULT_DATABASE = 'home_assistant'
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = 8086
DEFAULT_SSL = False
DEFAULT_VERIFY_SSL = False
DOMAIN = 'influxdb'
TIMEOUT = 5
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Inclusive(CONF_USERNAME, 'authentication'): cv.string,
vol.Inclusive(CONF_PASSWORD, 'authentication'): cv.string,
vol.Optional(CONF_BLACKLIST, default=[]):
vol.All(cv.ensure_list, [cv.entity_id]),
vol.Optional(CONF_DB_NAME, default=DEFAULT_DATABASE): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
vol.Optional(CONF_TAGS, default={}):
vol.Schema({cv.string: cv.string}),
vol.Optional(CONF_WHITELIST, default=[]):
vol.All(cv.ensure_list, [cv.entity_id]),
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean,
}),
}, extra=vol.ALLOW_EXTRA)
def setup(hass, config):
"""Setup the InfluxDB component."""
from influxdb import InfluxDBClient, exceptions
conf = config[DOMAIN]
host = conf.get(CONF_HOST)
port = conf.get(CONF_PORT)
database = conf.get(CONF_DB_NAME)
username = conf.get(CONF_USERNAME)
password = conf.get(CONF_PASSWORD)
ssl = conf.get(CONF_SSL)
verify_ssl = conf.get(CONF_VERIFY_SSL)
blacklist = conf.get(CONF_BLACKLIST)
whitelist = conf.get(CONF_WHITELIST)
tags = conf.get(CONF_TAGS)
try:
influx = InfluxDBClient(
host=host, port=port, username=username, password=password,
database=database, ssl=ssl, verify_ssl=verify_ssl,
timeout=TIMEOUT)
influx.query("select * from /.*/ LIMIT 1;")
except exceptions.InfluxDBClientError as exc:
_LOGGER.error("Database host is not accessible due to '%s', please "
"check your entries in the configuration file and that "
"the database exists and is READ/WRITE.", exc)
return False
def influx_event_listener(event):
"""Listen for new messages on the bus and sends them to Influx."""
state = event.data.get('new_state')
if state is None or state.state in (
STATE_UNKNOWN, '', STATE_UNAVAILABLE) or \
state.entity_id in blacklist:
return
try:
if len(whitelist) > 0 and state.entity_id not in whitelist:
return
_state = state_helper.state_as_number(state)
except ValueError:
_state = state.state
measurement = state.attributes.get('unit_of_measurement')
if measurement in (None, ''):
measurement = state.entity_id
json_body = [
{
'measurement': measurement,
'tags': {
'domain': state.domain,
'entity_id': state.object_id,
},
'time': event.time_fired,
'fields': {
'value': _state,
}
}
]
for key, value in state.attributes.items():
if key != 'unit_of_measurement':
json_body[0]['fields'][key] = value
json_body[0]['tags'].update(tags)
try:
influx.write_points(json_body)
except exceptions.InfluxDBClientError:
_LOGGER.exception('Error saving event "%s" to InfluxDB', json_body)
hass.bus.listen(EVENT_STATE_CHANGED, influx_event_listener)
return True
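# Illustrative configuration sketch (not part of the original module), matching
# CONFIG_SCHEMA above; only overridden values need to be listed, the rest fall
# back to the defaults defined at the top of this file.
#
#   # configuration.yaml
#   influxdb:
#     host: 192.168.1.190
#     database: home_assistant
#     username: myuser
#     password: mypassword
#     tags:
#       instance: prod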
| mit |
odoousers2014/LibrERP | l10n_it_ricevute_bancarie/wizard/wizard_emissione_riba.py | 2 | 6944 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012 Andrea Cometa.
# Email: [email protected]
# Web site: http://www.andreacometa.it
# Copyright (C) 2012 Agile Business Group sagl (<http://www.agilebg.com>)
# Copyright (C) 2012 Domsense srl (<http://www.domsense.com>)
# Copyright (C) 2012 Associazione OpenERP Italia
# (<http://www.openerp-italia.org>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, orm
# -------------------------------------------------------
# RIBA ISSUANCE (emissione RiBa)
# -------------------------------------------------------
class emissione_riba(orm.TransientModel):
_name = "riba.emissione"
_description = "Emissione Ricevute Bancarie"
_columns = {
'configurazione' : fields.many2one('riba.configurazione', 'Configurazione', required=True),
}
def crea_distinta(self, cr, uid, ids, context=None):
if context is None:
context = {}
def create_rdl(conta, rd_id, date_maturity, partner_id, acceptance_account_id, bank_id=None, bank_riba_id=None):
rdl = {
'sequence': conta,
'bank_id': bank_id,
'distinta_id': rd_id,
'due_date': date_maturity,
'partner_id': partner_id,
'state': 'draft',
'acceptance_account_id': acceptance_account_id,
'bank_riba_id': bank_riba_id,
}
return riba_distinta_line.create(cr, uid, rdl, context=context)
"""
Here we create the distinta (the RiBa slip).
"""
wizard_obj = self.browse(cr, uid, ids)[0]
active_ids = context and context.get('active_ids', [])
riba_distinta = self.pool.get('riba.distinta')
riba_distinta_line = self.pool.get('riba.distinta.line')
riba_distinta_move_line = self.pool.get('riba.distinta.move.line')
move_line_obj = self.pool.get('account.move.line')
# create distinta
rd = {
'name': self.pool.get('ir.sequence').get(cr, uid, 'seq.riba.distinta'),
'config': wizard_obj.configurazione.id,
'user_id': uid,
'date_created': fields.date.context_today(cr, uid, context),
}
rd_id = riba_distinta.create(cr, uid, rd)
# group by partner and due date
grouped_lines = {}
move_line_ids = move_line_obj.search(cr, uid, [('id', 'in', active_ids)], context=context)
for move_line in move_line_obj.browse(cr, uid, move_line_ids, context=context):
if move_line.partner_id.group_riba:
if not grouped_lines.get(
(move_line.partner_id.id, move_line.date_maturity), False):
grouped_lines[(move_line.partner_id.id, move_line.date_maturity)] = []
grouped_lines[(move_line.partner_id.id, move_line.date_maturity)].append(
move_line)
# create lines
conta = 1
for move_line in move_line_obj.browse(cr, uid, move_line_ids, context=context):
if move_line.partner_id.bank_riba_id:
bank_riba_id = move_line.partner_id.bank_riba_id
elif move_line.partner_id.bank_ids:
bank_riba_id = []
bank_id = move_line.partner_id.bank_ids[0]
else:
raise orm.except_orm('Attenzione!', 'Il cliente %s non ha la banca!!!' % move_line.partner_id.name)
if move_line.partner_id.group_riba:
for key in grouped_lines:
if key[0] == move_line.partner_id.id and key[1] == move_line.date_maturity:
if bank_riba_id:
rdl_id = create_rdl(conta, rd_id, move_line.date_maturity, move_line.partner_id.id,
wizard_obj.configurazione.acceptance_account_id.id, None, bank_riba_id.id)
else:
rdl_id = create_rdl(conta, rd_id, move_line.date_maturity, move_line.partner_id.id,
wizard_obj.configurazione.acceptance_account_id.id, bank_id.id, None)
# total = 0.0
# invoice_date_group = ''
for grouped_line in grouped_lines[key]:
riba_distinta_move_line.create(cr, uid, {
'riba_line_id': rdl_id,
'amount': grouped_line.credit and grouped_line.amount_residual * -1 or grouped_line.debit and grouped_line.amount_residual,
'move_line_id': grouped_line.id,
}, context=context)
del grouped_lines[key]
break
else:
if bank_riba_id:
rdl_id = create_rdl(conta, rd_id, move_line.date_maturity, move_line.partner_id.id,
wizard_obj.configurazione.acceptance_account_id.id, None, bank_riba_id.id)
else:
rdl_id = create_rdl(conta, rd_id, move_line.date_maturity, move_line.partner_id.id,
wizard_obj.configurazione.acceptance_account_id.id, bank_id.id, None)
riba_distinta_move_line.create(cr, uid, {
'riba_line_id': rdl_id,
'amount': move_line.amount_residual,
'move_line_id': move_line.id,
}, context=context)
conta += 1
# ----- show distinta form
mod_obj = self.pool.get('ir.model.data')
res = mod_obj.get_object_reference(cr, uid, 'l10n_it_ricevute_bancarie', 'view_distinta_riba_form')
res_id = res and res[1] or False
return {
'name': 'Distinta',
'view_type': 'form',
'view_mode': 'form',
'view_id': res_id,
'res_model': 'riba.distinta',
'type': 'ir.actions.act_window',
#'nodestroy': True,
'target': 'current',
'res_id': rd_id or False,
}
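# Editor's sketch (not part of the original wizard): the grouping step above keys the
# receivable move lines by (partner_id, date_maturity) so that partners flagged with
# group_riba get one distinta line per due date. The standalone helper below shows the
# same idea with plain dicts instead of browse records; it is illustrative only.
def _group_by_partner_and_due_date(move_lines):
    grouped = {}
    for line in move_lines:
        key = (line['partner_id'], line['date_maturity'])
        grouped.setdefault(key, []).append(line)
    return grouped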
| agpl-3.0 |
asgard-lab/neutron | neutron/tests/functional/agent/l3/test_namespace_manager.py | 25 | 3626 | # Copyright (c) 2015 Rackspace
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_utils import uuidutils
from neutron.agent.l3 import dvr_snat_ns
from neutron.agent.l3 import namespace_manager
from neutron.agent.l3 import namespaces
from neutron.agent.linux import ip_lib
from neutron.tests.functional import base
_uuid = uuidutils.generate_uuid
class NamespaceManagerTestFramework(base.BaseSudoTestCase):
def setUp(self):
super(NamespaceManagerTestFramework, self).setUp()
self.agent_conf = mock.MagicMock()
self.agent_conf.router_delete_namespaces = True
self.metadata_driver_mock = mock.Mock()
self.namespace_manager = namespace_manager.NamespaceManager(
self.agent_conf, driver=None, clean_stale=True,
metadata_driver=self.metadata_driver_mock)
def _create_namespace(self, router_id, ns_class):
namespace = ns_class(router_id, self.agent_conf, driver=None,
use_ipv6=False)
namespace.create()
self.addCleanup(self._delete_namespace, namespace)
return namespace.name
def _delete_namespace(self, namespace):
try:
namespace.delete()
except RuntimeError as e:
# If the namespace didn't exist when delete was attempted, mission
# accomplished. Otherwise, re-raise the exception
if 'No such file or directory' not in str(e):
raise e
def _namespace_exists(self, namespace):
ip = ip_lib.IPWrapper(namespace=namespace)
return ip.netns.exists(namespace)
class NamespaceManagerTestCase(NamespaceManagerTestFramework):
def test_namespace_manager(self):
router_id = _uuid()
router_id_to_delete = _uuid()
to_keep = set()
to_delete = set()
to_retrieve = set()
to_keep.add(self._create_namespace(router_id,
namespaces.RouterNamespace))
to_keep.add(self._create_namespace(router_id,
dvr_snat_ns.SnatNamespace))
to_delete.add(self._create_namespace(router_id_to_delete,
dvr_snat_ns.SnatNamespace))
to_retrieve = to_keep | to_delete
with mock.patch.object(namespace_manager.NamespaceManager, 'list_all',
return_value=to_retrieve):
with self.namespace_manager as ns_manager:
for ns_name in to_keep:
id_to_keep = ns_manager.get_prefix_and_id(ns_name)[1]
ns_manager.keep_router(id_to_keep)
for ns_name in to_keep:
self.assertTrue(self._namespace_exists(ns_name))
for ns_name in to_delete:
(self.metadata_driver_mock.destroy_monitored_metadata_proxy.
assert_called_once_with(mock.ANY,
router_id_to_delete,
self.agent_conf))
self.assertFalse(self._namespace_exists(ns_name))
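# Editor's sketch (not part of the test suite): the usage pattern exercised above,
# written out on its own. The agent config object is assumed to be supplied by the
# caller, the router ids are hypothetical, and metadata_driver=None is assumed to be
# an acceptable constructor value.
def _cleanup_stale_namespaces(agent_conf, router_ids_to_keep):
    nsm = namespace_manager.NamespaceManager(
        agent_conf, driver=None, clean_stale=True, metadata_driver=None)
    with nsm as manager:
        for router_id in router_ids_to_keep:
            manager.keep_router(router_id)
    # On __exit__ every namespace reported by list_all() whose router id was not
    # kept is deleted.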
| apache-2.0 |
zbqf109/goodo | openerp/addons/l10n_in_hr_payroll/wizard/hr_salary_employee_bymonth.py | 47 | 1864 | #-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import time
from openerp.osv import fields, osv
class hr_salary_employee_bymonth(osv.osv_memory):
_name = 'hr.salary.employee.month'
_description = 'Hr Salary Employee By Month Report'
_columns = {
'start_date': fields.date('Start Date', required=True),
'end_date': fields.date('End Date', required=True),
'employee_ids': fields.many2many('hr.employee', 'payroll_year_rel', 'payroll_year_id', 'employee_id', 'Employees', required=True),
'category_id': fields.many2one('hr.salary.rule.category', 'Category', required=True),
}
def _get_default_category(self, cr, uid, context=None):
category_ids = self.pool.get('hr.salary.rule.category').search(cr, uid, [('code', '=', 'NET')], context=context)
return category_ids and category_ids[0] or False
_defaults = {
'start_date': lambda *a: time.strftime('%Y-01-01'),
'end_date': lambda *a: time.strftime('%Y-%m-%d'),
'category_id': _get_default_category
}
def print_report(self, cr, uid, ids, context=None):
"""
To get the data and print the report
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param context: A standard dictionary
@return: report action
"""
if context is None:
context = {}
datas = {'ids': context.get('active_ids', [])}
res = self.read(cr, uid, ids, context=context)
res = res and res[0] or {}
datas.update({'form': res})
return self.pool['report'].get_action(cr, uid, ids,
'l10n_in_hr_payroll.report_hrsalarybymonth',
data=datas, context=context)
| gpl-3.0 |
kirca/odoo | openerp/addons/base/res/res_company.py | 37 | 21361 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os
import re
import openerp
from openerp import SUPERUSER_ID, tools
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools import image_resize_image
class multi_company_default(osv.osv):
"""
Manage multi company default value
"""
_name = 'multi_company.default'
_description = 'Default multi company'
_order = 'company_id,sequence,id'
_columns = {
'sequence': fields.integer('Sequence'),
'name': fields.char('Name', size=256, required=True, help='Name it to easily find a record'),
'company_id': fields.many2one('res.company', 'Main Company', required=True,
help='Company where the user is connected'),
'company_dest_id': fields.many2one('res.company', 'Default Company', required=True,
help='Company to store the current record'),
'object_id': fields.many2one('ir.model', 'Object', required=True,
help='Object affected by this rule'),
'expression': fields.char('Expression', size=256, required=True,
help='Expression, must be True to match\nuse context.get or user (browse)'),
'field_id': fields.many2one('ir.model.fields', 'Field', help='Select field property'),
}
_defaults = {
'expression': 'True',
'sequence': 100,
}
def copy(self, cr, uid, id, default=None, context=None):
"""
Append ' (copy)' to the name when duplicating a record
"""
if not context:
context = {}
if not default:
default = {}
company = self.browse(cr, uid, id, context=context)
default = default.copy()
default['name'] = company.name + _(' (copy)')
return super(multi_company_default, self).copy(cr, uid, id, default, context=context)
multi_company_default()
class res_company(osv.osv):
_name = "res.company"
_description = 'Companies'
_order = 'name'
def _get_address_data(self, cr, uid, ids, field_names, arg, context=None):
""" Read the 'address' functional fields. """
result = {}
part_obj = self.pool.get('res.partner')
for company in self.browse(cr, uid, ids, context=context):
result[company.id] = {}.fromkeys(field_names, False)
if company.partner_id:
address_data = part_obj.address_get(cr, openerp.SUPERUSER_ID, [company.partner_id.id], adr_pref=['default'])
if address_data['default']:
address = part_obj.read(cr, openerp.SUPERUSER_ID, address_data['default'], field_names, context=context)
for field in field_names:
result[company.id][field] = address[field] or False
return result
def _set_address_data(self, cr, uid, company_id, name, value, arg, context=None):
""" Write the 'address' functional fields. """
company = self.browse(cr, uid, company_id, context=context)
if company.partner_id:
part_obj = self.pool.get('res.partner')
address_data = part_obj.address_get(cr, uid, [company.partner_id.id], adr_pref=['default'])
address = address_data['default']
if address:
part_obj.write(cr, uid, [address], {name: value or False}, context=context)
else:
part_obj.create(cr, uid, {name: value or False, 'parent_id': company.partner_id.id}, context=context)
return True
def _get_logo_web(self, cr, uid, ids, _field_name, _args, context=None):
result = dict.fromkeys(ids, False)
for record in self.browse(cr, uid, ids, context=context):
size = (180, None)
result[record.id] = image_resize_image(record.partner_id.image, size)
return result
def _get_companies_from_partner(self, cr, uid, ids, context=None):
return self.pool['res.company'].search(cr, uid, [('partner_id', 'in', ids)], context=context)
_columns = {
'name': fields.related('partner_id', 'name', string='Company Name', size=128, required=True, store=True, type='char'),
'parent_id': fields.many2one('res.company', 'Parent Company', select=True),
'child_ids': fields.one2many('res.company', 'parent_id', 'Child Companies'),
'partner_id': fields.many2one('res.partner', 'Partner', required=True),
'rml_header': fields.text('RML Header', required=True),
'rml_header1': fields.char('Company Tagline', size=200, help="Appears by default on the top right corner of your printed documents (report header)."),
'rml_header2': fields.text('RML Internal Header', required=True),
'rml_header3': fields.text('RML Internal Header for Landscape Reports', required=True),
'rml_footer': fields.text('Report Footer', help="Footer text displayed at the bottom of all reports."),
'rml_footer_readonly': fields.related('rml_footer', type='text', string='Report Footer', readonly=True),
'custom_footer': fields.boolean('Custom Footer', help="Check this to define the report footer manually. Otherwise it will be filled in automatically."),
'font': fields.many2one('res.font', string="Font", domain=[('mode', 'in', ('Normal', 'Regular', 'all', 'Book'))],
help="Set the font into the report header, it will be used as default font in the RML reports of the user company"),
'logo': fields.related('partner_id', 'image', string="Logo", type="binary"),
'logo_web': fields.function(_get_logo_web, string="Logo Web", type="binary", store={
'res.company': (lambda s, c, u, i, x: i, ['partner_id'], 10),
'res.partner': (_get_companies_from_partner, ['image'], 10),
}),
'currency_id': fields.many2one('res.currency', 'Currency', required=True),
'currency_ids': fields.one2many('res.currency', 'company_id', 'Currency'),
'user_ids': fields.many2many('res.users', 'res_company_users_rel', 'cid', 'user_id', 'Accepted Users'),
'account_no':fields.char('Account No.', size=64),
'street': fields.function(_get_address_data, fnct_inv=_set_address_data, size=128, type='char', string="Street", multi='address'),
'street2': fields.function(_get_address_data, fnct_inv=_set_address_data, size=128, type='char', string="Street2", multi='address'),
'zip': fields.function(_get_address_data, fnct_inv=_set_address_data, size=24, type='char', string="Zip", multi='address'),
'city': fields.function(_get_address_data, fnct_inv=_set_address_data, size=24, type='char', string="City", multi='address'),
'state_id': fields.function(_get_address_data, fnct_inv=_set_address_data, type='many2one', relation='res.country.state', string="Fed. State", multi='address'),
'bank_ids': fields.one2many('res.partner.bank','company_id', 'Bank Accounts', help='Bank accounts related to this company'),
'country_id': fields.function(_get_address_data, fnct_inv=_set_address_data, type='many2one', relation='res.country', string="Country", multi='address'),
'email': fields.related('partner_id', 'email', size=64, type='char', string="Email", store=True),
'phone': fields.related('partner_id', 'phone', size=64, type='char', string="Phone", store=True),
'fax': fields.function(_get_address_data, fnct_inv=_set_address_data, size=64, type='char', string="Fax", multi='address'),
'website': fields.related('partner_id', 'website', string="Website", type="char", size=64),
'vat': fields.related('partner_id', 'vat', string="Tax ID", type="char", size=32),
'company_registry': fields.char('Company Registry', size=64),
'rml_paper_format': fields.selection([('a4', 'A4'), ('us_letter', 'US Letter')], "Paper Format", required=True, oldname='paper_format'),
}
_sql_constraints = [
('name_uniq', 'unique (name)', 'The company name must be unique !')
]
def onchange_footer(self, cr, uid, ids, custom_footer, phone, fax, email, website, vat, company_registry, bank_ids, context=None):
if custom_footer:
return {}
# first line (notice that missing elements are filtered out before the join)
res = ' | '.join(filter(bool, [
phone and '%s: %s' % (_('Phone'), phone),
fax and '%s: %s' % (_('Fax'), fax),
email and '%s: %s' % (_('Email'), email),
website and '%s: %s' % (_('Website'), website),
vat and '%s: %s' % (_('TIN'), vat),
company_registry and '%s: %s' % (_('Reg'), company_registry),
]))
# second line: bank accounts
res_partner_bank = self.pool.get('res.partner.bank')
account_data = self.resolve_2many_commands(cr, uid, 'bank_ids', bank_ids, context=context)
account_names = res_partner_bank._prepare_name_get(cr, uid, account_data, context=context)
if account_names:
title = _('Bank Accounts') if len(account_names) > 1 else _('Bank Account')
res += '\n%s: %s' % (title, ', '.join(name for id, name in account_names))
return {'value': {'rml_footer': res, 'rml_footer_readonly': res}}
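# Editor's note (hypothetical values, not from the original file): with a phone,
# an email, a VAT number and one bank account set, the generated footer looks like
# "Phone: +32 2 123 45 67 | Email: info@example.com | TIN: BE0123456789"
# followed on the next line by "Bank Account: <bank account display name>".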
def onchange_state(self, cr, uid, ids, state_id, context=None):
if state_id:
return {'value':{'country_id': self.pool.get('res.country.state').browse(cr, uid, state_id, context).country_id.id }}
return {}
def onchange_font_name(self, cr, uid, ids, font, rml_header, rml_header2, rml_header3, context=None):
""" To change default header style of all <para> and drawstring. """
def _change_header(header,font):
""" Replace default fontname use in header and setfont tag """
default_para = re.sub('fontName.?=.?".*"', 'fontName="%s"'% font, header)
return re.sub('(<setFont.?name.?=.?)(".*?")(.)', '\g<1>"%s"\g<3>'% font, default_para)
if not font:
return True
fontname = self.pool.get('res.font').browse(cr, uid, font, context=context).name
return {'value':{
'rml_header': _change_header(rml_header, fontname),
'rml_header2':_change_header(rml_header2, fontname),
'rml_header3':_change_header(rml_header3, fontname)
}}
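# Editor's note (illustrative input): _change_header rewrites every fontName="..."
# attribute and every <setFont name="..."> tag in the RML header so the whole header
# uses the newly selected font, e.g. '<setFont name="DejaVuSans" size="8"/>' becomes
# '<setFont name="Courier" size="8"/>' when the user picks a font named Courier.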
def on_change_country(self, cr, uid, ids, country_id, context=None):
res = {'domain': {'state_id': []}}
currency_id = self._get_euro(cr, uid, context=context)
if country_id:
currency_id = self.pool.get('res.country').browse(cr, uid, country_id, context=context).currency_id.id
res['domain'] = {'state_id': [('country_id','=',country_id)]}
res['value'] = {'currency_id': currency_id}
return res
def name_search(self, cr, uid, name='', args=None, operator='ilike', context=None, limit=100):
if context is None:
context = {}
if context.pop('user_preference', None):
# We browse as superuser. Otherwise, the user would be able to
# select only the currently visible companies (according to rules,
# which probably exist to allow seeing the child companies) even if
# she belongs to some other companies.
user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context)
cmp_ids = list(set([user.company_id.id] + [cmp.id for cmp in user.company_ids]))
uid = SUPERUSER_ID
args = (args or []) + [('id', 'in', cmp_ids)]
return super(res_company, self).name_search(cr, uid, name=name, args=args, operator=operator, context=context, limit=limit)
def _company_default_get(self, cr, uid, object=False, field=False, context=None):
"""
Check if the object for this company have a default value
"""
if not context:
context = {}
proxy = self.pool.get('multi_company.default')
args = [
('object_id.model', '=', object),
('field_id', '=', field),
]
ids = proxy.search(cr, uid, args, context=context)
user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context)
for rule in proxy.browse(cr, uid, ids, context):
if eval(rule.expression, {'context': context, 'user': user}):
return rule.company_dest_id.id
return user.company_id.id
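# Editor's note (hypothetical expressions, not from the original data): a rule's
# 'expression' is a plain Python string evaluated via safe_eval with only 'context'
# and 'user' in scope, e.g.
#   "user.company_id.id == 1"
#   "context.get('department') == 'sales'"
# The first rule whose expression evaluates truthy supplies company_dest_id;
# otherwise the user's own company is returned.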
@tools.ormcache()
def _get_company_children(self, cr, uid=None, company=None):
if not company:
return []
ids = self.search(cr, uid, [('parent_id','child_of',[company])])
return ids
def _get_partner_hierarchy(self, cr, uid, company_id, context=None):
if company_id:
parent_id = self.browse(cr, uid, company_id)['parent_id']
if parent_id:
return self._get_partner_hierarchy(cr, uid, parent_id.id, context)
else:
return self._get_partner_descendance(cr, uid, company_id, [], context)
return []
def _get_partner_descendance(self, cr, uid, company_id, descendance, context=None):
descendance.append(self.browse(cr, uid, company_id).partner_id.id)
for child_id in self._get_company_children(cr, uid, company_id):
if child_id != company_id:
descendance = self._get_partner_descendance(cr, uid, child_id, descendance)
return descendance
#
# This function restart the cache on the _get_company_children method
#
def cache_restart(self, cr):
self._get_company_children.clear_cache(self)
def create(self, cr, uid, vals, context=None):
if not vals.get('name', False) or vals.get('partner_id', False):
self.cache_restart(cr)
return super(res_company, self).create(cr, uid, vals, context=context)
obj_partner = self.pool.get('res.partner')
partner_id = obj_partner.create(cr, uid, {'name': vals['name'], 'is_company':True, 'image': vals.get('logo', False)}, context=context)
vals.update({'partner_id': partner_id})
self.cache_restart(cr)
company_id = super(res_company, self).create(cr, uid, vals, context=context)
obj_partner.write(cr, uid, [partner_id], {'company_id': company_id}, context=context)
return company_id
def write(self, cr, uid, ids, values, context=None):
self.cache_restart(cr)
return super(res_company, self).write(cr, uid, ids, values, context=context)
def _get_euro(self, cr, uid, context=None):
rate_obj = self.pool.get('res.currency.rate')
rate_id = rate_obj.search(cr, uid, [('rate', '=', 1)], context=context)
return rate_id and rate_obj.browse(cr, uid, rate_id[0], context=context).currency_id.id or False
def _get_logo(self, cr, uid, ids):
return open(os.path.join(tools.config['root_path'], 'addons', 'base', 'res', 'res_company_logo.png'), 'rb').read().encode('base64')
def _get_font(self, cr, uid, ids):
font_obj = self.pool.get('res.font')
res = font_obj.search(cr, uid, [('family', '=', 'Helvetica'), ('mode', '=', 'all')], limit=1)
return res and res[0] or False
_header = """
<header>
<pageTemplate>
<frame id="first" x1="28.0" y1="28.0" width="%s" height="%s"/>
<stylesheet>
<!-- Set here the default font to use for all <para> tags -->
<paraStyle name='Normal' fontName="DejaVuSans"/>
</stylesheet>
<pageGraphics>
<fill color="black"/>
<stroke color="black"/>
<setFont name="DejaVuSans" size="8"/>
<drawString x="%s" y="%s"> [[ formatLang(time.strftime("%%Y-%%m-%%d"), date=True) ]] [[ time.strftime("%%H:%%M") ]]</drawString>
<setFont name="DejaVuSans-Bold" size="10"/>
<drawCentredString x="%s" y="%s">[[ company.partner_id.name ]]</drawCentredString>
<stroke color="#000000"/>
<lines>%s</lines>
<!-- Set here the default font to use for all <drawString> tags -->
<!-- don't forget to change the 2 other occurence of <setFont> above if needed -->
<setFont name="DejaVuSans" size="8"/>
</pageGraphics>
</pageTemplate>
</header>"""
_header2 = _header % (539, 772, "1.0cm", "28.3cm", "11.1cm", "28.3cm", "1.0cm 28.1cm 20.1cm 28.1cm")
_header3 = _header % (786, 525, 25, 555, 440, 555, "25 550 818 550")
def _get_header(self,cr,uid,ids):
try :
header_file = tools.file_open(os.path.join('base', 'report', 'corporate_rml_header.rml'))
try:
return header_file.read()
finally:
header_file.close()
except:
return self._header_a4
_header_main = """
<header>
<pageTemplate>
<frame id="first" x1="1.3cm" y1="3.0cm" height="%s" width="19.0cm"/>
<stylesheet>
<!-- Set here the default font to use for all <para> tags -->
<paraStyle name='Normal' fontName="DejaVuSans"/>
<paraStyle name="main_footer" fontSize="8.0" alignment="CENTER"/>
<paraStyle name="main_header" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
</stylesheet>
<pageGraphics>
<!-- Set here the default font to use for all <drawString> tags -->
<setFont name="DejaVuSans" size="8"/>
<!-- You Logo - Change X,Y,Width and Height -->
<image x="1.3cm" y="%s" height="40.0" >[[ company.logo or removeParentNode('image') ]]</image>
<fill color="black"/>
<stroke color="black"/>
<!-- page header -->
<lines>1.3cm %s 20cm %s</lines>
<drawRightString x="20cm" y="%s">[[ company.rml_header1 ]]</drawRightString>
<drawString x="1.3cm" y="%s">[[ company.partner_id.name ]]</drawString>
<place x="1.3cm" y="%s" height="1.8cm" width="15.0cm">
<para style="main_header">[[ display_address(company.partner_id) or '' ]]</para>
</place>
<drawString x="1.3cm" y="%s">Phone:</drawString>
<drawRightString x="7cm" y="%s">[[ company.partner_id.phone or '' ]]</drawRightString>
<drawString x="1.3cm" y="%s">Mail:</drawString>
<drawRightString x="7cm" y="%s">[[ company.partner_id.email or '' ]]</drawRightString>
<lines>1.3cm %s 7cm %s</lines>
<!-- left margin -->
<rotate degrees="90"/>
<fill color="grey"/>
<drawString x="2.65cm" y="-0.4cm">generated by OpenERP.com</drawString>
<fill color="black"/>
<rotate degrees="-90"/>
<!--page bottom-->
<lines>1.2cm 2.65cm 19.9cm 2.65cm</lines>
<place x="1.3cm" y="0cm" height="2.55cm" width="19.0cm">
<para style="main_footer">[[ company.rml_footer ]]</para>
<para style="main_footer">Contact : [[ user.name ]] - Page: <pageNumber/></para>
</place>
</pageGraphics>
</pageTemplate>
</header>"""
_header_a4 = _header_main % ('21.7cm', '27.7cm', '27.7cm', '27.7cm', '27.8cm', '27.3cm', '25.3cm', '25.0cm', '25.0cm', '24.6cm', '24.6cm', '24.5cm', '24.5cm')
_header_letter = _header_main % ('20cm', '26.0cm', '26.0cm', '26.0cm', '26.1cm', '25.6cm', '23.6cm', '23.3cm', '23.3cm', '22.9cm', '22.9cm', '22.8cm', '22.8cm')
def onchange_rml_paper_format(self, cr, uid, ids, rml_paper_format, context=None):
if rml_paper_format == 'us_letter':
return {'value': {'rml_header': self._header_letter}}
return {'value': {'rml_header': self._header_a4}}
def act_discover_fonts(self, cr, uid, ids, context=None):
return self.pool.get("res.font").font_scan(cr, uid, context=context)
_defaults = {
'currency_id': _get_euro,
'rml_paper_format': 'a4',
'rml_header':_get_header,
'rml_header2': _header2,
'rml_header3': _header3,
'logo':_get_logo,
'font':_get_font,
}
_constraints = [
(osv.osv._check_recursion, 'Error! You can not create recursive companies.', ['parent_id'])
]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
PaoloW8/android_kernel_nubia_nx505j | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py | 12980 | 5411 | # SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <[email protected]>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
try:
import wx
except ImportError:
raise ImportError, "You need to install the wxpython lib for this script"
class RootFrame(wx.Frame):
Y_OFFSET = 100
RECT_HEIGHT = 100
RECT_SPACE = 50
EVENT_MARKING_WIDTH = 5
def __init__(self, sched_tracer, title, parent = None, id = -1):
wx.Frame.__init__(self, parent, id, title)
(self.screen_width, self.screen_height) = wx.GetDisplaySize()
self.screen_width -= 10
self.screen_height -= 10
self.zoom = 0.5
self.scroll_scale = 20
self.sched_tracer = sched_tracer
self.sched_tracer.set_root_win(self)
(self.ts_start, self.ts_end) = sched_tracer.interval()
self.update_width_virtual()
self.nr_rects = sched_tracer.nr_rectangles() + 1
self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
# whole window panel
self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))
# scrollable container
self.scroll = wx.ScrolledWindow(self.panel)
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
self.scroll.EnableScrolling(True, True)
self.scroll.SetFocus()
# scrollable drawing area
self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Fit()
self.Fit()
self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)
self.txt = None
self.Show(True)
def us_to_px(self, val):
return val / (10 ** 3) * self.zoom
def px_to_us(self, val):
return (val / self.zoom) * (10 ** 3)
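# Editor's note: us_to_px and px_to_us are exact inverses -- the zoom factor scales
# trace time into pixels and back, so scroll offsets in pixels can always be mapped
# back to timestamps in the trace.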
def scroll_start(self):
(x, y) = self.scroll.GetViewStart()
return (x * self.scroll_scale, y * self.scroll_scale)
def scroll_start_us(self):
(x, y) = self.scroll_start()
return self.px_to_us(x)
def paint_rectangle_zone(self, nr, color, top_color, start, end):
offset_px = self.us_to_px(start - self.ts_start)
width_px = self.us_to_px(end - self.ts_start)
offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
width_py = RootFrame.RECT_HEIGHT
dc = self.dc
if top_color is not None:
(r, g, b) = top_color
top_color = wx.Colour(r, g, b)
brush = wx.Brush(top_color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
width_py -= RootFrame.EVENT_MARKING_WIDTH
offset_py += RootFrame.EVENT_MARKING_WIDTH
(r, g, b) = color
color = wx.Colour(r, g, b)
brush = wx.Brush(color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, width_py)
def update_rectangles(self, dc, start, end):
start += self.ts_start
end += self.ts_start
self.sched_tracer.fill_zone(start, end)
def on_paint(self, event):
dc = wx.PaintDC(self.scroll_panel)
self.dc = dc
width = min(self.width_virtual, self.screen_width)
(x, y) = self.scroll_start()
start = self.px_to_us(x)
end = self.px_to_us(x + width)
self.update_rectangles(dc, start, end)
def rect_from_ypixel(self, y):
y -= RootFrame.Y_OFFSET
rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
return -1
return rect
def update_summary(self, txt):
if self.txt:
self.txt.Destroy()
self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))
def on_mouse_down(self, event):
(x, y) = event.GetPositionTuple()
rect = self.rect_from_ypixel(y)
if rect == -1:
return
t = self.px_to_us(x) + self.ts_start
self.sched_tracer.mouse_down(rect, t)
def update_width_virtual(self):
self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)
def __zoom(self, x):
self.update_width_virtual()
(xpos, ypos) = self.scroll.GetViewStart()
xpos = self.us_to_px(x) / self.scroll_scale
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
self.Refresh()
def zoom_in(self):
x = self.scroll_start_us()
self.zoom *= 2
self.__zoom(x)
def zoom_out(self):
x = self.scroll_start_us()
self.zoom /= 2
self.__zoom(x)
def on_key_press(self, event):
key = event.GetRawKeyCode()
if key == ord("+"):
self.zoom_in()
return
if key == ord("-"):
self.zoom_out()
return
key = event.GetKeyCode()
(x, y) = self.scroll.GetViewStart()
if key == wx.WXK_RIGHT:
self.scroll.Scroll(x + 1, y)
elif key == wx.WXK_LEFT:
self.scroll.Scroll(x - 1, y)
elif key == wx.WXK_DOWN:
self.scroll.Scroll(x, y + 1)
elif key == wx.WXK_UP:
self.scroll.Scroll(x, y - 1)
| gpl-2.0 |
devs1991/test_edx_docmode | venv/lib/python2.7/site-packages/pymongo/mongo_replica_set_client.py | 2 | 83540 | # Copyright 2011-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Tools for connecting to a MongoDB replica set.
.. seealso:: :doc:`/examples/high_availability` for more examples of
how to connect to a replica set.
To get a :class:`~pymongo.database.Database` instance from a
:class:`MongoReplicaSetClient` use either dictionary-style or
attribute-style access:
.. doctest::
>>> from pymongo import MongoReplicaSetClient
>>> c = MongoReplicaSetClient('localhost:27017', replicaSet='repl0')
>>> c.test_database
Database(MongoReplicaSetClient([u'...', u'...']), u'test_database')
>>> c['test_database']
Database(MongoReplicaSetClient([u'...', u'...']), u'test_database')
"""
import atexit
import datetime
import socket
import struct
import threading
import time
import warnings
import weakref
from bson.binary import PYTHON_LEGACY
from bson.codec_options import CodecOptions
from bson.py3compat import b
from pymongo import (auth,
common,
database,
helpers,
message,
pool,
thread_util,
uri_parser)
from pymongo.member import Member
from pymongo.read_preferences import (
ReadPreference, select_member, modes, MovingAverage, _ServerMode)
from pymongo.errors import (AutoReconnect,
ConfigurationError,
ConnectionFailure,
DocumentTooLarge,
DuplicateKeyError,
OperationFailure,
InvalidOperation)
from pymongo.thread_util import DummyLock
EMPTY = b("")
MAX_RETRY = 3
MONITORS = set()
def register_monitor(monitor):
ref = weakref.ref(monitor, _on_monitor_deleted)
MONITORS.add(ref)
def _on_monitor_deleted(ref):
"""Remove the weakreference from the set
of active MONITORS. We no longer
care about keeping track of it
"""
MONITORS.remove(ref)
def shutdown_monitors():
# Keep a local copy of MONITORS as
# shutting down threads has a side effect
# of removing them from the MONITORS set()
monitors = list(MONITORS)
for ref in monitors:
monitor = ref()
if monitor:
monitor.shutdown()
monitor.join(10)
atexit.register(shutdown_monitors)
def _partition_node(node):
"""Split a host:port string returned from mongod/s into
a (host, int(port)) pair needed for socket.connect().
"""
host = node
port = 27017
idx = node.rfind(':')
if idx != -1:
host, port = node[:idx], int(node[idx + 1:])
if host.startswith('['):
host = host[1:-1]
return host, port
# Concurrency notes: A MongoReplicaSetClient keeps its view of the replica-set
# state in an RSState instance. RSStates are immutable, except for
# host-pinning. Pools, which are internally thread / greenlet safe, can be
# copied from old to new RSStates safely. The client updates its view of the
# set's state not by modifying its RSState but by replacing it with an updated
# copy.
# In __init__, MongoReplicaSetClient gets a list of potential members called
# 'seeds' from its initial parameters, and calls refresh(). refresh() iterates
# over the seeds in arbitrary order looking for a member it can connect to.
# Once it finds one, it calls 'ismaster' and sets self.__hosts to the list of
# members in the response, and connects to the rest of the members. refresh()
# sets the MongoReplicaSetClient's RSState. Finally, __init__ launches the
# replica-set monitor.
# The monitor calls refresh() every 30 seconds, or whenever the client has
# encountered an error that prompts it to wake the monitor.
# Every method that accesses the RSState multiple times within the method makes
# a local reference first and uses that throughout, so it's isolated from a
# concurrent method replacing the RSState with an updated copy. This technique
# avoids the need to lock around accesses to the RSState.
class RSState(object):
def __init__(
self, threadlocal, hosts=None, host_to_member=None, arbiters=None,
writer=None, error_message='No primary available', exc=None,
initial=False):
"""An immutable snapshot of the client's view of the replica set state.
Stores Member instances for all members we're connected to, and a
list of (host, port) pairs for all the hosts and arbiters listed
in the most recent ismaster response.
:Parameters:
- `threadlocal`: Thread- or greenlet-local storage
- `hosts`: Sequence of (host, port) pairs
- `host_to_member`: Optional dict: (host, port) -> Member instance
- `arbiters`: Optional sequence of arbiters as (host, port)
- `writer`: Optional (host, port) of primary
- `error_message`: Optional error if `writer` is None
- `exc`: Optional error if state is unusable
- `initial`: Whether this is the initial client state
"""
self._threadlocal = threadlocal # threading.local or gevent local
self._arbiters = frozenset(arbiters or []) # set of (host, port)
self._writer = writer # (host, port) of the primary, or None
self._error_message = error_message
self._host_to_member = host_to_member or {}
self._hosts = frozenset(hosts or [])
self._members = frozenset(self._host_to_member.values())
self._exc = exc
self._initial = initial
self._primary_member = self.get(writer)
def clone_with_host_down(self, host, error_message):
"""Get a clone, marking as "down" the member with the given (host, port)
"""
members = self._host_to_member.copy()
members.pop(host, None)
if host == self.writer:
# The primary went down; record the error message.
return RSState(
self._threadlocal,
self._hosts,
members,
self._arbiters,
None,
error_message,
self._exc)
else:
# Some other host went down. Keep our current primary or, if it's
# already down, keep our current error message.
return RSState(
self._threadlocal,
self._hosts,
members,
self._arbiters,
self._writer,
self._error_message,
self._exc)
def clone_without_writer(self, threadlocal):
"""Get a clone without a primary. Unpins all threads.
:Parameters:
- `threadlocal`: Thread- or greenlet-local storage
"""
return RSState(
threadlocal,
self._hosts,
self._host_to_member,
self._arbiters)
def clone_with_error(self, exc):
return RSState(
self._threadlocal,
self._hosts,
self._host_to_member.copy(),
self._arbiters,
self._writer,
self._error_message,
exc)
@property
def arbiters(self):
"""(host, port) pairs from the last ismaster response's arbiter list.
"""
return self._arbiters
@property
def writer(self):
"""(host, port) of primary, or None."""
return self._writer
@property
def primary_member(self):
return self._primary_member
@property
def hosts(self):
"""(host, port) pairs from the last ismaster response's host list."""
return self._hosts
@property
def members(self):
"""Set of Member instances."""
return self._members
@property
def error_message(self):
"""The error, if any, raised when trying to connect to the primary"""
return self._error_message
@property
def secondaries(self):
"""Set of (host, port) pairs, secondaries we're connected to."""
# Unlike the other properties, this isn't cached because it isn't used
# in regular operations.
return set([
host for host, member in self._host_to_member.items()
if member.is_secondary])
@property
def exc(self):
"""Reason RSState is unusable, or None."""
return self._exc
@property
def initial(self):
"""Whether this is the initial client state."""
return self._initial
def get(self, host):
"""Return a Member instance or None for the given (host, port)."""
return self._host_to_member.get(host)
def pin_host(self, host, mode, tag_sets, latency):
"""Pin this thread / greenlet to a member.
`host` is a (host, port) pair. The remaining parameters are a read
preference.
"""
# Fun fact: Unlike in thread_util.ThreadIdent, we needn't lock around
# assignment here. Assignment to a threadlocal is only unsafe if it
# can cause other Python code to run implicitly.
self._threadlocal.host = host
self._threadlocal.read_preference = (mode, tag_sets, latency)
def keep_pinned_host(self, mode, tag_sets, latency):
"""Does a read pref match the last used by this thread / greenlet?"""
return self._threadlocal.read_preference == (mode, tag_sets, latency)
@property
def pinned_host(self):
"""The (host, port) last used by this thread / greenlet, or None."""
return getattr(self._threadlocal, 'host', None)
def unpin_host(self):
"""Forget this thread / greenlet's last used member."""
self._threadlocal.host = self._threadlocal.read_preference = None
@property
def threadlocal(self):
return self._threadlocal
def __str__(self):
return '<RSState [%s] writer="%s">' % (
', '.join(str(member) for member in self._host_to_member.itervalues()),
self.writer and '%s:%s' % self.writer or None)
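# Editor's note (conceptual, names simplified): consistent with the concurrency notes
# above, the client never mutates an RSState in place. On an error it builds a
# replacement, e.g. rs_state.clone_with_host_down(('db2.example.com', 27017), 'timed out'),
# and swaps that new snapshot in; readers still holding the old snapshot are unaffected.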
class Monitor(object):
"""Base class for replica set monitors.
"""
_refresh_interval = 30
def __init__(self, rsc, event_class):
self.rsc = weakref.proxy(rsc, self.shutdown)
self.timer = event_class()
self.refreshed = event_class()
self.started_event = event_class()
self.stopped = False
def start_sync(self):
"""Start the Monitor and block until it's really started.
"""
# start() can return before the thread is fully bootstrapped,
# so a fork can leave the thread thinking it's alive in a child
# process when it's really dead:
# http://bugs.python.org/issue18418.
self.start() # Implemented in subclasses.
self.started_event.wait(5)
def shutdown(self, dummy=None):
"""Signal the monitor to shutdown.
"""
self.stopped = True
self.timer.set()
def schedule_refresh(self):
"""Refresh immediately
"""
if not self.isAlive():
# Checks in RS client should prevent this.
raise AssertionError("schedule_refresh called with dead monitor")
self.refreshed.clear()
self.timer.set()
def wait_for_refresh(self, timeout_seconds):
"""Block until a scheduled refresh completes
"""
self.refreshed.wait(timeout_seconds)
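# Editor's note: schedule_refresh() clears 'refreshed' and sets 'timer', so the
# monitor loop below wakes immediately instead of sleeping the full refresh interval;
# callers that need up-to-date state then block on wait_for_refresh() until the loop
# sets 'refreshed' again.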
def monitor(self):
"""Run until the RSC is collected or an
unexpected error occurs.
"""
self.started_event.set()
while True:
self.timer.wait(Monitor._refresh_interval)
if self.stopped:
break
self.timer.clear()
try:
try:
self.rsc.refresh()
finally:
self.refreshed.set()
except (AutoReconnect, OperationFailure):
pass
# RSC has been collected or there
# was an unexpected error.
except:
break
def isAlive(self):
raise NotImplementedError()
class MonitorThread(threading.Thread, Monitor):
"""Thread based replica set monitor.
"""
def __init__(self, rsc):
Monitor.__init__(self, rsc, threading.Event)
threading.Thread.__init__(self)
self.setName("ReplicaSetMonitorThread")
self.setDaemon(True)
def run(self):
"""Override Thread's run method.
"""
self.monitor()
have_gevent = False
try:
from gevent import Greenlet
from gevent.event import Event
# Used by ReplicaSetConnection
from gevent.local import local as gevent_local
have_gevent = True
class MonitorGreenlet(Monitor, Greenlet):
"""Greenlet based replica set monitor.
"""
def __init__(self, rsc):
self.monitor_greenlet_alive = False
Monitor.__init__(self, rsc, Event)
Greenlet.__init__(self)
def start_sync(self):
self.monitor_greenlet_alive = True
# Call superclass.
Monitor.start_sync(self)
# Don't override `run` in a Greenlet. Add _run instead.
# Refer to gevent's Greenlet docs and source for more
# information.
def _run(self):
"""Define Greenlet's _run method.
"""
self.monitor()
def isAlive(self):
# bool(self) isn't immediately True after someone calls start(),
# but isAlive() is. Thus it's safe for greenlets to do:
# "if not monitor.isAlive(): monitor.start()"
# ... and be guaranteed only one greenlet starts the monitor.
return self.monitor_greenlet_alive
except ImportError:
pass
class MongoReplicaSetClient(common.BaseObject):
"""Connection to a MongoDB replica set.
"""
# For tests.
_refresh_timeout_sec = 5
_rs_client = True
def __init__(self, hosts_or_uri=None, max_pool_size=100,
document_class=dict, tz_aware=False, **kwargs):
"""Create a new connection to a MongoDB replica set.
The resultant client object has connection-pooling built
in. It also performs auto-reconnection when necessary. If an
operation fails because of a connection error,
:class:`~pymongo.errors.ConnectionFailure` is raised. If
auto-reconnection will be performed,
:class:`~pymongo.errors.AutoReconnect` will be
raised. Application code should handle this exception
(recognizing that the operation failed) and then continue to
execute.
Raises :class:`~pymongo.errors.ConnectionFailure` if
the connection cannot be made.
The `hosts_or_uri` parameter can be a full `mongodb URI
<http://dochub.mongodb.org/core/connections>`_, in addition to
a string of `host:port` pairs (e.g. 'host1:port1,host2:port2').
If `hosts_or_uri` is None 'localhost:27017' will be used.
.. note:: Instances of :class:`MongoReplicaSetClient` start a
background task to monitor the state of the replica set. This allows
it to quickly respond to changes in replica set configuration.
Before discarding an instance of :class:`MongoReplicaSetClient` make
sure you call :meth:`~close` to ensure that the monitor task is
cleanly shut down.
:Parameters:
- `hosts_or_uri` (optional): A MongoDB URI or string of `host:port`
pairs. If a host is an IPv6 literal it must be enclosed in '[' and
']' characters following the RFC2732 URL syntax (e.g. '[::1]' for
localhost)
- `max_pool_size` (optional): The maximum number of connections
each pool will open simultaneously. If this is set, operations
will block if there are `max_pool_size` outstanding connections
from the pool. Defaults to 100. Cannot be 0.
- `document_class` (optional): default class to use for
documents returned from queries on this client
- `tz_aware` (optional): if ``True``,
:class:`~datetime.datetime` instances returned as values
in a document by this :class:`MongoReplicaSetClient` will be timezone
aware (otherwise they will be naive)
- `replicaSet`: (required) The name of the replica set to connect to.
The driver will verify that each host it connects to is a member of
this replica set. Can be passed as a keyword argument or as a
MongoDB URI option.
| **Other optional parameters can be passed as keyword arguments:**
- `host`: For compatibility with :class:`~mongo_client.MongoClient`.
If both `host` and `hosts_or_uri` are specified `host` takes
precedence.
- `port`: For compatibility with :class:`~mongo_client.MongoClient`.
The default port number to use for hosts.
- `maxpoolsize` (optional): Alias for max_pool_size. Takes
precedence over max_pool_size.
- `socketTimeoutMS`: (integer or None) How long (in milliseconds) a
send or receive on a socket can take before timing out. Defaults to
``None`` (no timeout).
- `connectTimeoutMS`: (integer or None) How long (in milliseconds) a
connection can take to be opened before timing out. Defaults to
``20000``.
- `waitQueueTimeoutMS`: (integer or None) How long (in milliseconds)
a thread will wait for a socket from the pool if the pool has no
free sockets. Defaults to ``None`` (no timeout).
- `waitQueueMultiple`: (integer or None) Multiplied by max_pool_size
to give the number of threads allowed to wait for a socket at one
time. Defaults to ``None`` (no waiters).
- `socketKeepAlive`: (boolean) Whether to send periodic keep-alive
packets on connected sockets. Defaults to ``False`` (do not send
keep-alive packets).
- `connect`: if True (the default), immediately connect to MongoDB
in the foreground. Otherwise connect on the first operation.
| **Write Concern options:**
| (Only set if passed. No default values.)
- `w`: (integer or string) Write operations will block until they have
been replicated to the specified number or tagged set of servers.
`w=<int>` always includes the replica set primary (e.g. w=3 means
write to the primary and wait until replicated to **two**
secondaries). Passing w=0 **disables write acknowledgement** and all
other write concern options.
- `wtimeout`: (integer) Used in conjunction with `w`. Specify a value
in milliseconds to control how long to wait for write propagation
to complete. If replication does not complete in the given
timeframe, a timeout exception is raised.
- `j`: If ``True`` block until write operations have been committed
to the journal. Cannot be used in combination with `fsync`. Prior
to MongoDB 2.6 this option was ignored if the server was running
without journaling. Starting with MongoDB 2.6 write operations will
fail with an exception if this option is used when the server is
running without journaling.
- `fsync`: If ``True`` and the server is running without journaling,
blocks until the server has synced all data files to disk. If the
server is running with journaling, this acts the same as the `j`
option, blocking until write operations have been committed to the
journal. Cannot be used in combination with `j`.
| **Read preference options:**
- `read_preference`: The read preference for this client.
See :mod:`~pymongo.read_preferences` for available
options. Defaults to ``ReadPreference.PRIMARY``.
- `localThresholdMS`: (integer) Any replica-set member
whose ping time is within localThresholdMS of the
nearest member may accept reads. Default 15 milliseconds.
**Ignored by mongos** and must be configured on the command line.
See the localThreshold_ option for more information.
| **SSL configuration:**
- `ssl`: If ``True``, create the connection to the servers using SSL.
Defaults to ``False``.
- `ssl_keyfile`: The private keyfile used to identify the local
connection against mongod. If included with the ``certfile`` then
only the ``ssl_certfile`` is needed. Implies ``ssl=True``.
Defaults to ``None``.
- `ssl_certfile`: The certificate file used to identify the local
connection against mongod. Implies ``ssl=True``. Defaults to
``None``.
- `ssl_cert_reqs`: Specifies whether a certificate is required from
the other side of the connection, and whether it will be validated
if provided. It must be one of the three values ``ssl.CERT_NONE``
(certificates ignored), ``ssl.CERT_OPTIONAL``
(not required, but validated if provided), or ``ssl.CERT_REQUIRED``
(required and validated). If the value of this parameter is not
``ssl.CERT_NONE``, then the ``ssl_ca_certs`` parameter must point
to a file of CA certificates. Implies ``ssl=True``. Defaults to
``ssl.CERT_NONE``.
- `ssl_ca_certs`: The ca_certs file contains a set of concatenated
"certification authority" certificates, which are used to validate
certificates passed from the other end of the connection.
Implies ``ssl=True``. Defaults to ``None``.
- `ssl_match_hostname`: If ``True`` (the default), and
`ssl_cert_reqs` is not ``ssl.CERT_NONE``, enables hostname
verification using the :func:`~ssl.match_hostname` function from
python's :mod:`~ssl` module. Think very carefully before setting
this to ``False`` as that could make your application vulnerable to
man-in-the-middle attacks.
.. versionchanged:: 2.5
Added additional ssl options
.. versionadded:: 2.4
"""
self.__opts = {}
self.__seeds = set()
self.__index_cache = {}
self.__auth_credentials = {}
self.__monitor = None
self.__closed = False
# Compatibility with mongo_client.MongoClient
host = kwargs.pop('host', hosts_or_uri)
port = kwargs.pop('port', 27017)
if not isinstance(port, int):
raise TypeError("port must be an instance of int")
username = None
password = None
self.__default_database_name = None
options = {}
if host is None:
self.__seeds.add(('localhost', port))
elif '://' in host:
res = uri_parser.parse_uri(host, port)
self.__seeds.update(res['nodelist'])
username = res['username']
password = res['password']
self.__default_database_name = res['database']
options = res['options']
else:
self.__seeds.update(uri_parser.split_hosts(host, port))
# _pool_class and _monitor_class are for deep customization of PyMongo,
# e.g. Motor. SHOULD NOT BE USED BY DEVELOPERS EXTERNAL TO MONGODB.
self.pool_class = kwargs.pop('_pool_class', pool.Pool)
self.__monitor_class = kwargs.pop('_monitor_class', None)
# tag_sets is only supported through kwargs since it must be a list.
if "tag_sets" in kwargs:
warnings.warn("tag_sets is deprecated in this version of PyMongo "
"and removed in PyMongo 3. Pass a read preference "
"object as read_preference instead",
DeprecationWarning, stacklevel=2)
# Support _ServerMode through kwargs
pref = kwargs.get("read_preference")
if isinstance(pref, _ServerMode):
kwargs["read_preference"] = pref.mode
kwargs["tag_sets"] = pref.tag_sets
# URI overrides kwargs.
for option, value in kwargs.iteritems():
option, value = common.validate(option, value)
self.__opts[option] = value
self.__opts.update(options)
# Both of these work in kwargs and URI...
if ("secondary_acceptable_latency_ms" in self.__opts or
"secondaryacceptablelatencyms" in self.__opts):
warnings.warn("secondary_acceptable_latency_ms and "
"secondaryAcceptableLatencyMS are deprecated. Use "
"localThresholdMS instead",
DeprecationWarning, stacklevel=2)
self.__max_pool_size = self.__opts.get(
'maxpoolsize',
common.validate_positive_integer_or_none(
'max_pool_size', max_pool_size))
common.validate_boolean('tz_aware', tz_aware)
uuid_representation = options.pop('uuidrepresentation', PYTHON_LEGACY)
self.__opts['codec_options'] = CodecOptions(
document_class, tz_aware, uuid_representation)
self.__use_greenlets = self.__opts.get('use_greenlets', False)
if self.__use_greenlets:
if not have_gevent:
raise ConfigurationError(
"The gevent module is not available. "
"Install the gevent package from PyPI.")
warnings.warn("use_greenlets is deprecated in this version of "
"PyMongo and removed in PyMongo 3. Use Gevent's "
"monkey.patch_all() instead.",
DeprecationWarning, stacklevel=2)
self.__rs_state = RSState(self.__make_threadlocal(), initial=True)
self.__request_counter = thread_util.Counter(self.__use_greenlets)
self.__auto_start_request = self.__opts.get('auto_start_request', False)
if self.__auto_start_request:
self.start_request()
self.__name = self.__opts.get('replicaset')
if not self.__name:
raise ConfigurationError("the replicaSet "
"keyword parameter is required.")
self.__net_timeout = self.__opts.get('sockettimeoutms')
self.__conn_timeout = self.__opts.get('connecttimeoutms', 20.0)
self.__wait_queue_timeout = self.__opts.get('waitqueuetimeoutms')
self.__wait_queue_multiple = self.__opts.get('waitqueuemultiple')
self.__socket_keepalive = self.__opts.get('socketkeepalive', False)
self.__use_ssl = self.__opts.get('ssl')
self.__ssl_keyfile = self.__opts.get('ssl_keyfile')
self.__ssl_certfile = self.__opts.get('ssl_certfile')
self.__ssl_cert_reqs = self.__opts.get('ssl_cert_reqs')
self.__ssl_ca_certs = self.__opts.get('ssl_ca_certs')
self.__ssl_match_hostname = self.__opts.get('ssl_match_hostname', True)
ssl_kwarg_keys = [k for k in kwargs.keys()
if k.startswith('ssl_') and kwargs[k]]
if self.__use_ssl is False and ssl_kwarg_keys:
raise ConfigurationError("ssl has not been enabled but the "
"following ssl parameters have been set: "
"%s. Please set `ssl=True` or remove."
% ', '.join(ssl_kwarg_keys))
if self.__ssl_cert_reqs and not self.__ssl_ca_certs:
raise ConfigurationError("If `ssl_cert_reqs` is not "
"`ssl.CERT_NONE` then you must "
"include `ssl_ca_certs` to be able "
"to validate the server.")
if ssl_kwarg_keys and self.__use_ssl is None:
# ssl options imply ssl = True
self.__use_ssl = True
if self.__use_ssl and not common.HAS_SSL:
raise ConfigurationError("The ssl module is not available. If you "
"are using a python version previous to "
"2.6 you must install the ssl package "
"from PyPI.")
# localThresholdMS takes precedence over secondaryAcceptableLatencyMS
if "localthresholdms" in self.__opts:
self.__opts.pop("secondaryacceptablelatencyms", None)
self.__opts.pop("secondary_acceptable_latency_ms", None)
self.__opts["secondaryacceptablelatencyms"] = (
self.__opts["localthresholdms"])
super(MongoReplicaSetClient, self).__init__(**self.__opts)
if self.slave_okay:
warnings.warn("slave_okay is deprecated in this version of "
"PyMongo and removed in PyMongo 3. Use "
"secondaryPreferred read preference instead.",
DeprecationWarning, stacklevel=2)
_connect = self.__opts.get('_connect', self.__opts.get('connect', True))
if _connect:
try:
self.refresh(initial=True)
except AutoReconnect, e:
# ConnectionFailure makes more sense here than AutoReconnect
raise ConnectionFailure(str(e))
if username:
mechanism = options.get('authmechanism', 'DEFAULT')
source = (
options.get('authsource')
or self.__default_database_name
or 'admin')
credentials = auth._build_credentials_tuple(mechanism,
source,
username,
password,
options)
try:
self._cache_credentials(source, credentials, _connect)
except OperationFailure, exc:
raise ConfigurationError(str(exc))
# Start the monitor after we know the configuration is correct.
if not self.__monitor_class:
if self.__use_greenlets:
self.__monitor_class = MonitorGreenlet
else:
# Common case: monitor RS with a background thread.
self.__monitor_class = MonitorThread
if self.__use_greenlets:
# Greenlets don't need to lock around access to the monitor.
# A Greenlet can safely do:
# "if not self.__monitor: self.__monitor = monitor_class()"
# because it won't be interrupted between the check and the
# assignment.
self.__monitor_lock = DummyLock()
else:
self.__monitor_lock = threading.Lock()
if _connect:
self.__ensure_monitor()
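    # A typical construction of this client (a minimal, hedged sketch; the URI,
    # replica set name, and pool size below are illustrative, not taken from
    # this module):
    #
    #     client = MongoReplicaSetClient(
    #         'mongodb://host1:27017,host2:27017/?replicaSet=rs0',
    #         max_pool_size=50)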
def _cached(self, dbname, coll, index):
"""Test if `index` is cached.
"""
cache = self.__index_cache
now = datetime.datetime.utcnow()
return (dbname in cache and
coll in cache[dbname] and
index in cache[dbname][coll] and
now < cache[dbname][coll][index])
def _cache_index(self, dbase, collection, index, cache_for):
"""Add an index to the index cache for ensure_index operations.
"""
now = datetime.datetime.utcnow()
expire = datetime.timedelta(seconds=cache_for) + now
if dbase not in self.__index_cache:
self.__index_cache[dbase] = {}
self.__index_cache[dbase][collection] = {}
self.__index_cache[dbase][collection][index] = expire
elif collection not in self.__index_cache[dbase]:
self.__index_cache[dbase][collection] = {}
self.__index_cache[dbase][collection][index] = expire
else:
self.__index_cache[dbase][collection][index] = expire
def _purge_index(self, database_name,
collection_name=None, index_name=None):
"""Purge an index from the index cache.
If `index_name` is None purge an entire collection.
If `collection_name` is None purge an entire database.
"""
if not database_name in self.__index_cache:
return
if collection_name is None:
del self.__index_cache[database_name]
return
if not collection_name in self.__index_cache[database_name]:
return
if index_name is None:
del self.__index_cache[database_name][collection_name]
return
if index_name in self.__index_cache[database_name][collection_name]:
del self.__index_cache[database_name][collection_name][index_name]
def _cache_credentials(self, source, credentials, connect=True):
"""Add credentials to the database authentication cache
for automatic login when a socket is created. If `connect` is True,
verify the credentials on the server first.
Raises OperationFailure if other credentials are already stored for
this source.
"""
if source in self.__auth_credentials:
# Nothing to do if we already have these credentials.
if credentials == self.__auth_credentials[source]:
return
raise OperationFailure('Another user is already authenticated '
'to this database. You must logout first.')
if connect:
# Try to authenticate even during failover.
member = select_member(
self.__rs_state.members, ReadPreference.PRIMARY_PREFERRED)
if not member:
raise AutoReconnect(
"No replica set members available for authentication")
sock_info = self.__socket(member)
try:
# Since __check_auth was called in __socket
# there is no need to call it here.
auth.authenticate(credentials, sock_info, self.__simple_command)
sock_info.authset.add(credentials)
finally:
member.maybe_return_socket(sock_info)
self.__auth_credentials[source] = credentials
def _purge_credentials(self, source):
"""Purge credentials from the database authentication cache.
"""
if source in self.__auth_credentials:
del self.__auth_credentials[source]
def __check_auth(self, sock_info):
"""Authenticate using cached database credentials.
"""
if self.__auth_credentials or sock_info.authset:
cached = set(self.__auth_credentials.itervalues())
authset = sock_info.authset.copy()
# Logout any credentials that no longer exist in the cache.
for credentials in authset - cached:
self.__simple_command(sock_info, credentials[1], {'logout': 1})
sock_info.authset.discard(credentials)
for credentials in cached - authset:
auth.authenticate(credentials,
sock_info, self.__simple_command)
sock_info.authset.add(credentials)
@property
def seeds(self):
"""The seed list used to connect to this replica set.
A sequence of (host, port) pairs.
"""
return self.__seeds
@property
def hosts(self):
"""All active and passive (priority 0) replica set
members known to this client. This does not include
hidden or slaveDelay members, or arbiters.
A sequence of (host, port) pairs.
"""
return self.__rs_state.hosts
@property
def address(self):
"""The (host, port) of the current primary of the replica set.
Returns None if there is no primary.
.. versionadded:: 2.9
"""
return self.__rs_state.writer
@property
def primary(self):
"""The (host, port) of the current primary of the replica set.
Returns None if there is no primary.
"""
return self.__rs_state.writer
@property
def secondaries(self):
"""The secondary members known to this client.
A sequence of (host, port) pairs.
"""
return self.__rs_state.secondaries
@property
def arbiters(self):
"""The arbiters known to this client.
A sequence of (host, port) pairs.
"""
return self.__rs_state.arbiters
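    # Reading the discovered topology (a hedged sketch; assumes a connected
    # client named ``client``):
    #
    #     client.primary      # (host, port) of the primary, or None
    #     client.secondaries  # (host, port) pairs of known secondaries
    #     client.arbiters     # (host, port) pairs of arbiters
    #     client.hosts        # all active and passive members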
@property
def is_mongos(self):
"""If this instance is connected to mongos (always False).
.. versionadded:: 2.3
"""
return False
@property
def max_pool_size(self):
"""The maximum number of sockets the pool will open concurrently.
When the pool has reached `max_pool_size`, operations block waiting for
a socket to be returned to the pool. If ``waitQueueTimeoutMS`` is set,
a blocked operation will raise :exc:`~pymongo.errors.ConnectionFailure`
after a timeout. By default ``waitQueueTimeoutMS`` is not set.
.. warning:: SIGNIFICANT BEHAVIOR CHANGE in 2.6. Previously, this
parameter would limit only the idle sockets the pool would hold
onto, not the number of open sockets. The default has also changed
to 100.
.. versionchanged:: 2.6
"""
return self.__max_pool_size
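    # Bounding concurrent socket use (a hedged sketch; option names follow the
    # docstring above, values are illustrative). With ``waitQueueTimeoutMS``
    # set, a blocked operation raises ConnectionFailure instead of waiting
    # indefinitely for a socket:
    #
    #     client = MongoReplicaSetClient(
    #         'mongodb://host1,host2/?replicaSet=rs0',
    #         max_pool_size=100, waitQueueTimeoutMS=500)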
@property
def use_greenlets(self):
"""**DEPRECATED** Whether calling :meth:`start_request` assigns
greenlet-local, rather than thread-local, sockets.
.. warning:: :attr:`use_greenlets` is deprecated in this version of
PyMongo and removed in PyMongo 3. Use Gevent's monkey.patch_all()
instead.
.. versionchanged:: 2.9
Deprecated use_greenlets.
.. versionadded:: 2.4.2
"""
warnings.warn("use_greenlets is deprecated in this version of PyMongo "
"and removed in PyMongo 3. Use Gevent's "
"monkey.patch_all() instead",
DeprecationWarning, stacklevel=2)
return self.__use_greenlets
def get_document_class(self):
"""**DEPRECATED** Default class to use for documents returned from this
client.
.. warning:: :attr:`document_class` is deprecated in this version of
PyMongo and removed in PyMongo 3. Use
:class:`~bson.codec_options.CodecOptions`
with :meth:`~pymongo.mongo_client.MongoClient.get_database`,
:meth:`~pymongo.database.Database.get_collection`,
or :meth:`~pymongo.collection.Collection.with_options` instead.
See the :doc:`/migrate-to-pymongo3` for examples.
.. versionchanged:: 2.9
Deprecated document_class.
"""
warnings.warn("document_class is deprecated in this version of "
"PyMongo and removed in PyMongo 3. See the "
"document_class docstring for more information.",
DeprecationWarning, stacklevel=2)
return self._codec_options.document_class
def set_document_class(self, klass):
"""document_class setter"""
warnings.warn("document_class is deprecated in this version of "
"PyMongo and removed in PyMongo 3. See the "
"document_class docstring for more information.",
DeprecationWarning, stacklevel=2)
tz_aware = self._codec_options.tz_aware
uuid_rep = self._codec_options.uuid_representation
self._codec_options = CodecOptions(klass, tz_aware, uuid_rep)
document_class = property(get_document_class, set_document_class)
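    # Forward-compatible alternative to assigning ``document_class`` on the
    # client (a hedged sketch, following the deprecation notice above):
    #
    #     from bson.codec_options import CodecOptions
    #     db = client.get_database(
    #         'test', codec_options=CodecOptions(document_class=dict))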
@property
def tz_aware(self):
"""**DEPRECATED** Does this client return timezone-aware datetimes?
.. warning:: :attr:`tz_aware` is deprecated in this version of PyMongo
and removed in PyMongo 3. See :attr:`codec_options` instead.
.. versionchanged:: 2.9
Deprecated tz_aware.
"""
warnings.warn("tz_aware is deprecated in this version of PyMongo and "
"removed in PyMongo 3. Use codec_options instead.",
DeprecationWarning, stacklevel=2)
return self._codec_options.tz_aware
@property
def max_bson_size(self):
"""Returns the maximum size BSON object the connected primary
accepts in bytes. Defaults to 16MB if not connected to a
primary.
"""
rs_state = self.__rs_state
if rs_state.primary_member:
return rs_state.primary_member.max_bson_size
return common.MAX_BSON_SIZE
@property
def max_message_size(self):
"""Returns the maximum message size the connected primary
accepts in bytes. Defaults to 32MB if not connected to a
primary.
"""
rs_state = self.__rs_state
if rs_state.primary_member:
return rs_state.primary_member.max_message_size
return common.MAX_MESSAGE_SIZE
@property
def min_wire_version(self):
"""The minWireVersion reported by the server.
Returns ``0`` when connected to server versions prior to MongoDB 2.6.
.. versionadded:: 2.7
"""
rs_state = self.__rs_state
if rs_state.primary_member:
return rs_state.primary_member.min_wire_version
return common.MIN_WIRE_VERSION
@property
def max_wire_version(self):
"""The maxWireVersion reported by the server.
Returns ``0`` when connected to server versions prior to MongoDB 2.6.
.. versionadded:: 2.7
"""
rs_state = self.__rs_state
if rs_state.primary_member:
return rs_state.primary_member.max_wire_version
return common.MAX_WIRE_VERSION
@property
def max_write_batch_size(self):
"""The maxWriteBatchSize reported by the server.
Returns a default value when connected to server versions prior to
MongoDB 2.6.
.. versionadded:: 2.7
"""
rs_state = self.__rs_state
if rs_state.primary_member:
return rs_state.primary_member.max_write_batch_size
return common.MAX_WRITE_BATCH_SIZE
@property
def auto_start_request(self):
"""**DEPRECATED** Is auto_start_request enabled?
.. versionchanged:: 2.8
Deprecated auto_start_request.
"""
warnings.warn("auto_start_request is deprecated in this version of "
"PyMongo and removed in PyMongo 3.",
DeprecationWarning, stacklevel=2)
return self.__auto_start_request
@property
def local_threshold_ms(self):
"""Any replica set member whose ping time is within
:attr:`local_threshold_ms` of the nearest member may accept reads.
Defaults to 15 milliseconds.
.. versionadded:: 2.9
"""
return self._secondary_acceptable_latency_ms
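    # The threshold is normally configured through connection options rather
    # than set directly (a hedged sketch; the URI is illustrative):
    #
    #     client = MongoReplicaSetClient(
    #         'mongodb://host1,host2/?replicaSet=rs0&localThresholdMS=30')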
def __simple_command(self, sock_info, dbname, spec):
"""Send a command to the server.
Returns (response, ping_time in seconds).
"""
ns = dbname + '.$cmd'
rqst_id, msg, _ = message.query(0, ns, 0, -1, spec)
start = time.time()
try:
sock_info.sock.sendall(msg)
response = self.__recv_msg(1, rqst_id, sock_info)
except:
sock_info.close()
raise
end = time.time()
response = helpers._unpack_response(response)['data'][0]
msg = "command %s on namespace %s failed: %%s" % (
repr(spec).replace("%", "%%"), ns)
helpers._check_command_response(response, None, msg)
return response, end - start
def __is_master(self, host):
"""Directly call ismaster.
Returns (response, connection_pool, ping_time in seconds).
"""
connection_pool = self.pool_class(
host,
self.__max_pool_size,
self.__net_timeout,
self.__conn_timeout,
self.__use_ssl,
wait_queue_timeout=self.__wait_queue_timeout,
wait_queue_multiple=self.__wait_queue_multiple,
socket_keepalive=self.__socket_keepalive,
use_greenlets=self.__use_greenlets,
ssl_keyfile=self.__ssl_keyfile,
ssl_certfile=self.__ssl_certfile,
ssl_cert_reqs=self.__ssl_cert_reqs,
ssl_ca_certs=self.__ssl_ca_certs,
ssl_match_hostname=self.__ssl_match_hostname)
if self.in_request():
connection_pool.start_request()
sock_info = connection_pool.get_socket()
try:
response, ping_time = self.__simple_command(
sock_info, 'admin', {'ismaster': 1}
)
connection_pool.maybe_return_socket(sock_info)
return response, connection_pool, ping_time
except (ConnectionFailure, socket.error):
connection_pool.discard_socket(sock_info)
raise
def __schedule_refresh(self, sync=False):
"""Awake the monitor to update our view of the replica set's state.
If `sync` is True, block until the refresh completes.
If multiple application threads call __schedule_refresh while refresh
is in progress, the work of refreshing the state is only performed
once.
"""
if self.__closed:
raise InvalidOperation('MongoReplicaSetClient has been closed')
monitor = self.__ensure_monitor()
monitor.schedule_refresh()
if sync:
monitor.wait_for_refresh(timeout_seconds=self._refresh_timeout_sec)
def __ensure_monitor(self):
"""Ensure the monitor is started, and return it."""
self.__monitor_lock.acquire()
try:
# Another thread can start the monitor while we wait for the lock.
if self.__monitor is not None and self.__monitor.isAlive():
return self.__monitor
monitor = self.__monitor = self.__monitor_class(self)
register_monitor(monitor)
monitor.start_sync()
return monitor
finally:
self.__monitor_lock.release()
def __make_threadlocal(self):
if self.__use_greenlets:
return gevent_local()
else:
return threading.local()
def refresh(self, initial=False):
"""Iterate through the existing host list, or possibly the
seed list, to update the list of hosts and arbiters in this
replica set.
"""
# Only one thread / greenlet calls refresh() at a time: the one
# running __init__() or the monitor. We won't modify the state, only
# replace it.
rs_state = self.__rs_state
try:
self.__rs_state = self.__create_rs_state(rs_state, initial)
except ConfigurationError, e:
self.__rs_state = rs_state.clone_with_error(e)
raise
def __create_rs_state(self, rs_state, initial):
errors = []
if rs_state.hosts:
# Try first those hosts we think are up, then the down ones.
nodes = sorted(
rs_state.hosts,
key=lambda host: bool(rs_state.get(host)),
reverse=True)
else:
nodes = self.__seeds
hosts = set()
# This will become the new RSState.
members = {}
arbiters = set()
writer = None
# Look for first member from which we can get a list of all members.
for node in nodes:
member, sock_info = rs_state.get(node), None
try:
if member:
sock_info = self.__socket(member, force=True)
response, ping_time = self.__simple_command(
sock_info, 'admin', {'ismaster': 1})
member.maybe_return_socket(sock_info)
new_member = member.clone_with(response, ping_time)
else:
response, pool, ping_time = self.__is_master(node)
new_member = Member(
node, pool, response, MovingAverage([ping_time]))
# Check that this host is part of the given replica set.
# Fail fast if we find a bad seed during __init__.
# Regular refreshes keep searching for valid nodes.
if response.get('setName') != self.__name:
if initial:
host, port = node
raise ConfigurationError("%s:%d is not a member of "
"replica set %s"
% (host, port, self.__name))
else:
continue
if "arbiters" in response:
arbiters = set([
_partition_node(h) for h in response["arbiters"]])
if "hosts" in response:
hosts.update([_partition_node(h)
for h in response["hosts"]])
if "passives" in response:
hosts.update([_partition_node(h)
for h in response["passives"]])
# Start off the new 'members' dict with this member
# but don't add seed list members.
if node in hosts:
members[node] = new_member
if response['ismaster']:
writer = node
except (ConnectionFailure, socket.error, OperationFailure), why:
# Member unreachable, or transient auth failure while member
# is resyncing credentials.
if member:
member.discard_socket(sock_info)
errors.append("%s:%d: %s" % (node[0], node[1], str(why)))
if hosts:
break
else:
# We've changed nothing. On the next refresh, we'll try the same
# list of hosts: rs_state.hosts or self.__seeds.
if errors:
raise AutoReconnect(', '.join(errors))
raise ConfigurationError('No suitable hosts found')
# Ensure we have a pool for each member, and find the primary.
for host in hosts:
if host in members:
# This member was the first we connected to, in the loop above.
continue
member, sock_info = rs_state.get(host), None
try:
if member:
sock_info = self.__socket(member, force=True)
res, ping_time = self.__simple_command(
sock_info, 'admin', {'ismaster': 1})
if res.get('setName') != self.__name:
# Not a member of this set.
continue
member.maybe_return_socket(sock_info)
new_member = member.clone_with(res, ping_time)
else:
res, connection_pool, ping_time = self.__is_master(host)
if res.get('setName') != self.__name:
# Not a member of this set.
continue
new_member = Member(
host, connection_pool, res, MovingAverage([ping_time]))
members[host] = new_member
except (ConnectionFailure, socket.error, OperationFailure):
# Member unreachable, or transient auth failure while member
# is resyncing credentials.
if member:
member.discard_socket(sock_info)
continue
if res['ismaster']:
writer = host
if not members:
# In the first loop, we connected to a member in the seed list
# and got a host list, but couldn't reach any members in that
# list.
raise AutoReconnect(
"Couldn't reach any hosts in %s. Replica set is"
" configured with internal hostnames or IPs?"
% list(hosts))
if writer == rs_state.writer:
threadlocal = self.__rs_state.threadlocal
else:
# We unpin threads from members if the primary has changed, since
# no monotonic consistency can be promised now anyway.
threadlocal = self.__make_threadlocal()
# Get list of hosts in the RS config, including unreachable ones.
# Prefer the primary's list, otherwise any member's list.
if writer:
response = members[writer].ismaster_response
elif members:
response = members.values()[0].ismaster_response
else:
response = {}
final_host_list = (
response.get('hosts', [])
+ response.get('passives', []))
# Replace old state with new.
return RSState(
threadlocal,
[_partition_node(h) for h in final_host_list],
members,
arbiters,
writer)
def __get_rs_state(self):
rs_state = self.__rs_state
if rs_state.exc:
raise rs_state.exc
return rs_state
def __find_primary(self):
"""Returns a connection to the primary of this replica set,
if one exists, or raises AutoReconnect.
"""
rs_state = self.__get_rs_state()
primary = rs_state.primary_member
if primary:
return primary
# We had a failover.
self.__schedule_refresh(sync=True)
# Try again. This time copy the RSState reference so we're guaranteed
# primary_member and error_message are from the same state.
rs_state = self.__get_rs_state()
if rs_state.primary_member:
return rs_state.primary_member
# Couldn't find the primary.
raise AutoReconnect(rs_state.error_message)
def __socket(self, member, force=False):
"""Get a SocketInfo from the pool.
"""
if self.__auto_start_request and not self.in_request():
self.start_request()
sock_info = member.get_socket(force=force)
try:
self.__check_auth(sock_info)
except:
# No matter whether an auth failure or network error, increment
# the pool's semaphore by returning the socket.
member.maybe_return_socket(sock_info)
raise
return sock_info
def _ensure_connected(self, sync=False):
"""Ensure this client instance is connected to a primary.
"""
# This may be the first time we're connecting to the set.
self.__ensure_monitor()
if sync:
rs_state = self.__rs_state
if rs_state.exc or not rs_state.primary_member:
self.__schedule_refresh(sync)
def disconnect(self):
"""Disconnect from the replica set primary, unpin all members, and
refresh our view of the replica set.
"""
rs_state = self.__rs_state
if rs_state.primary_member:
rs_state.primary_member.reset()
threadlocal = self.__make_threadlocal()
self.__rs_state = rs_state.clone_without_writer(threadlocal)
self.__schedule_refresh()
def close(self):
"""Close this client instance.
This method first terminates the replica set monitor, then disconnects
from all members of the replica set. No further operations are
permitted on this client.
.. warning:: This method stops the replica set monitor task. The
replica set monitor is required to properly handle replica set
configuration changes, including a failure of the primary.
Once :meth:`~close` is called this client instance must not be
reused.
.. versionchanged:: 2.2.1
The :meth:`close` method now terminates the replica set monitor.
"""
self.__closed = True
self.__rs_state = RSState(self.__make_threadlocal())
monitor, self.__monitor = self.__monitor, None
if monitor:
monitor.shutdown()
# Use a reasonable timeout.
monitor.join(1.0)
def alive(self):
"""**DEPRECATED** Return ``False`` if there has been an error
communicating with the primary, else ``True``.
This method attempts to check the status of the primary with minimal
I/O. The current thread / greenlet retrieves a socket from the
primary's connection pool and checks whether calling select_ on it
raises an error. If there are currently no idle sockets,
:meth:`alive` attempts to connect a new socket.
A more certain way to determine primary availability is to ping it::
client.admin.command('ping')
.. _select: http://docs.python.org/2/library/select.html#select.select
"""
warnings.warn("alive is deprecated in this version of PyMongo and "
"removed in PyMongo 3.",
DeprecationWarning, stacklevel=2)
# In the common case, a socket is available and was used recently, so
# calling select() on it is a reasonable attempt to see if the OS has
# reported an error.
primary, sock_info = None, None
try:
try:
rs_state = self.__get_rs_state()
primary = rs_state.primary_member
if not primary:
return False
else:
sock_info = self.__socket(primary)
return not pool._closed(sock_info.sock)
except (socket.error, ConnectionFailure):
return False
finally:
if primary:
primary.maybe_return_socket(sock_info)
def __check_response_to_last_error(self, response, is_command):
"""Check a response to a lastError message for errors.
`response` is a byte string representing a response to the message.
If it represents an error response we raise OperationFailure.
Return the response as a document.
"""
response = helpers._unpack_response(response)
assert response["number_returned"] == 1
result = response["data"][0]
helpers._check_command_response(result, self.disconnect)
# write commands - skip getLastError checking
if is_command:
return result
# getLastError
error_msg = result.get("err", "")
if error_msg is None:
return result
if error_msg.startswith("not master"):
self.disconnect()
raise AutoReconnect(error_msg)
code = result.get("code")
if code in (11000, 11001, 12582):
raise DuplicateKeyError(result["err"], code, result)
raise OperationFailure(result["err"], code, result)
def __recv_data(self, length, sock_info):
"""Lowest level receive operation.
Takes length to receive and repeatedly calls recv until able to
return a buffer of that length, raising ConnectionFailure on error.
"""
message = EMPTY
while length:
chunk = sock_info.sock.recv(length)
if chunk == EMPTY:
raise ConnectionFailure("connection closed")
length -= len(chunk)
message += chunk
return message
def __recv_msg(self, operation, rqst_id, sock):
"""Receive a message in response to `rqst_id` on `sock`.
Returns the response data with the header removed.
"""
header = self.__recv_data(16, sock)
length = struct.unpack("<i", header[:4])[0]
actual_op = struct.unpack("<i", header[12:])[0]
assert actual_op == operation, (
"wire protocol error: unknown opcode %r" % (actual_op,))
# No rqst_id for exhaust cursor "getMore".
if rqst_id is not None:
resp_id = struct.unpack("<i", header[8:12])[0]
assert rqst_id == resp_id, (
"wire protocol error: got response id %r but expected %r"
% (resp_id, rqst_id))
assert length > 16, ("wire protocol error: message length is shorter"
" than standard message header: %r" % (length,))
return self.__recv_data(length - 16, sock)
def __check_bson_size(self, msg, max_size):
"""Make sure the message doesn't include BSON documents larger
than the connected server will accept.
:Parameters:
- `msg`: message to check
"""
if len(msg) == 3:
request_id, data, max_doc_size = msg
if max_doc_size > max_size:
raise DocumentTooLarge("BSON document too large (%d bytes)"
" - the connected server supports"
" BSON document sizes up to %d"
" bytes." %
(max_doc_size, max_size))
return (request_id, data)
# get_more and kill_cursors messages
# don't include BSON documents.
return msg
def _send_message(self, msg, with_last_error=False,
command=False, _connection_to_use=None):
"""Say something to Mongo.
Raises ConnectionFailure if the message cannot be sent. Raises
OperationFailure if `with_last_error` is ``True`` and the
response to the getLastError call returns an error. Return the
response from lastError, or ``None`` if `with_last_error` is
``False``.
:Parameters:
- `msg`: message to send
- `with_last_error`: check getLastError status after sending the
message
"""
self._ensure_connected()
if _connection_to_use in (None, -1):
member = self.__find_primary()
else:
member = self.__get_rs_state().get(_connection_to_use)
sock_info = None
try:
try:
sock_info = self.__socket(member)
rqst_id, data = self.__check_bson_size(
msg, member.max_bson_size)
sock_info.sock.sendall(data)
# Safe mode. We pack the message together with a lastError
# message and send both. We then get the response (to the
# lastError) and raise OperationFailure if it is an error
# response.
rv = None
if with_last_error:
response = self.__recv_msg(1, rqst_id, sock_info)
rv = self.__check_response_to_last_error(response, command)
return rv
except OperationFailure:
raise
except(ConnectionFailure, socket.error), why:
member.discard_socket(sock_info)
if _connection_to_use in (None, -1):
self.disconnect()
raise AutoReconnect(str(why))
except:
sock_info.close()
raise
finally:
member.maybe_return_socket(sock_info)
def __send_and_receive(self, member, msg, **kwargs):
"""Send a message on the given socket and return the response data.
Can raise socket.error.
"""
sock_info = None
exhaust = kwargs.get('exhaust')
rqst_id, data = self.__check_bson_size(msg, member.max_bson_size)
try:
sock_info = self.__socket(member)
if not exhaust and "network_timeout" in kwargs:
sock_info.sock.settimeout(kwargs['network_timeout'])
sock_info.sock.sendall(data)
response = self.__recv_msg(1, rqst_id, sock_info)
if not exhaust:
if "network_timeout" in kwargs:
sock_info.sock.settimeout(self.__net_timeout)
member.maybe_return_socket(sock_info)
return response, sock_info, member.pool
except:
if sock_info is not None:
sock_info.close()
member.maybe_return_socket(sock_info)
raise
def __try_read(self, member, msg, **kwargs):
"""Attempt a read from a member; on failure mark the member "down" and
wake up the monitor thread to refresh as soon as possible.
"""
try:
return self.__send_and_receive(member, msg, **kwargs)
except socket.timeout, e:
# Could be one slow query, don't refresh.
host, port = member.host
raise AutoReconnect("%s:%d: %s" % (host, port, e))
except (socket.error, ConnectionFailure), why:
# Try to replace our RSState with a clone where this member is
# marked "down", to reduce exceptions on other threads, or repeated
# exceptions on this thread. We accept that there's a race
# condition (another thread could be replacing our state with a
# different version concurrently) but this approach is simple and
# lock-free.
self.__rs_state = self.__rs_state.clone_with_host_down(
member.host, str(why))
self.__schedule_refresh()
host, port = member.host
raise AutoReconnect("%s:%d: %s" % (host, port, why))
def _send_message_with_response(self, msg, _connection_to_use=None,
_must_use_master=False, **kwargs):
"""Send a message to Mongo and return the response.
Sends the given message and returns (host used, response).
:Parameters:
- `msg`: (request_id, data) pair making up the message to send
- `_connection_to_use`: Optional (host, port) of member for message,
used by Cursor for getMore and killCursors messages.
- `_must_use_master`: If True, send to primary.
"""
self._ensure_connected()
rs_state = self.__get_rs_state()
tag_sets = kwargs.get('tag_sets', [{}])
mode = kwargs.get('read_preference', ReadPreference.PRIMARY)
if _must_use_master:
mode = ReadPreference.PRIMARY
tag_sets = [{}]
if not rs_state.primary_member:
# If we were initialized with _connect=False then connect now.
# Otherwise, the primary was down last we checked. Start a refresh
# if one is not already in progress. If caller requested the
# primary, wait to see if it's up, otherwise continue with
# known-good members.
sync = (rs_state.initial or mode == ReadPreference.PRIMARY)
self.__schedule_refresh(sync=sync)
rs_state = self.__rs_state
latency = kwargs.get(
'secondary_acceptable_latency_ms',
self.secondary_acceptable_latency_ms)
try:
if _connection_to_use is not None:
if _connection_to_use == -1:
member = rs_state.primary_member
error_message = rs_state.error_message
else:
member = rs_state.get(_connection_to_use)
error_message = '%s:%s not available' % _connection_to_use
if not member:
raise AutoReconnect(error_message)
return member.pool.pair, self.__try_read(
member, msg, **kwargs)
except AutoReconnect:
if _connection_to_use in (-1, rs_state.writer):
# Primary's down. Refresh.
self.disconnect()
raise
# To provide some monotonic consistency, we use the same member as
# long as this thread is in a request and all reads use the same
# mode, tags, and latency. The member gets unpinned if pref changes,
# if member changes state, if we detect a failover, or if this thread
# calls end_request().
errors = []
pinned_host = rs_state.pinned_host
pinned_member = rs_state.get(pinned_host)
if (pinned_member
and pinned_member.matches_mode(mode)
and pinned_member.matches_tag_sets(tag_sets) # TODO: REMOVE?
and rs_state.keep_pinned_host(mode, tag_sets, latency)):
try:
return (
pinned_member.host,
self.__try_read(pinned_member, msg, **kwargs))
except AutoReconnect, why:
if _must_use_master or mode == ReadPreference.PRIMARY:
self.disconnect()
raise
else:
errors.append(str(why))
# No pinned member, or pinned member down or doesn't match read pref
rs_state.unpin_host()
members = list(rs_state.members)
while len(errors) < MAX_RETRY:
member = select_member(
members=members,
mode=mode,
tag_sets=tag_sets,
latency=latency)
if not member:
# Ran out of members to try
break
try:
# Removes member on failure, so select_member won't retry it.
response = self.__try_read(member, msg, **kwargs)
# Success
if self.in_request():
# Keep reading from this member in this thread / greenlet
# unless read preference changes
rs_state.pin_host(member.host, mode, tag_sets, latency)
return member.host, response
except AutoReconnect, why:
if mode == ReadPreference.PRIMARY:
raise
errors.append(str(why))
members.remove(member)
# Ran out of tries
if mode == ReadPreference.PRIMARY:
msg = "No replica set primary available for query"
elif mode == ReadPreference.SECONDARY:
msg = "No replica set secondary available for query"
else:
msg = "No replica set members available for query"
msg += " with ReadPreference %s" % modes[mode]
if tag_sets != [{}]:
msg += " and tags " + repr(tag_sets)
# Format a message like:
# 'No replica set secondary available for query with ReadPreference
# SECONDARY. host:27018: timed out, host:27019: timed out'.
if errors:
msg += ". " + ', '.join(errors)
raise AutoReconnect(msg, errors)
def _exhaust_next(self, sock_info):
"""Used with exhaust cursors to get the next batch off the socket.
Can raise AutoReconnect.
"""
try:
return self.__recv_msg(1, None, sock_info)
except socket.error, e:
raise AutoReconnect(str(e))
def start_request(self):
"""**DEPRECATED**: start_request is removed in PyMongo 3.0.
When doing w=0 writes to MongoDB 2.4 or earlier, :meth:`start_request`
was sometimes useful to ensure the current thread always used the same
socket until it called :meth:`end_request`. This made consistent reads
more likely after an unacknowledged write. Requests are no longer
useful in modern MongoDB applications, see
`PYTHON-785 <https://jira.mongodb.org/browse/PYTHON-785>`_.
.. warning:: :meth:`start_request`, :meth:`in_request`,
and :meth:`end_request` are deprecated, and removed in PyMongo 3.
See the :doc:`/migrate-to-pymongo3` for more information.
.. versionchanged:: 2.8
Deprecated start_request.
.. versionadded:: 2.2
The :class:`~pymongo.pool.Request` return value.
:meth:`start_request` previously returned None
"""
# We increment our request counter's thread- or greenlet-local value
# for every call to start_request; however, we only call each pool's
# start_request once to start a request, and call each pool's
# end_request once to end it. We don't let pools' request counters
# exceed 1. This keeps things sane when we create and delete pools
# within a request.
if 1 == self.__request_counter.inc():
for member in self.__rs_state.members:
member.start_request()
return pool.Request(self)
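    # Legacy usage of the deprecated request API described above (a hedged
    # sketch; database and collection names are illustrative):
    #
    #     request = client.start_request()
    #     try:
    #         client.db.things.insert({'x': 1}, w=0)
    #         client.db.things.find_one({'x': 1})  # same socket, sees the write
    #     finally:
    #         client.end_request()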
def in_request(self):
"""**DEPRECATED**: True if :meth:`start_request` has been called, but
not :meth:`end_request`, or if `auto_start_request` is True and
:meth:`end_request` has not been called in this thread or greenlet.
.. warning:: :meth:`start_request`, :meth:`in_request`,
and :meth:`end_request` are deprecated in this version of PyMongo
and removed in PyMongo 3. See the :doc:`/migrate-to-pymongo3` for more
information.
.. versionchanged:: 2.8
Deprecated in_request.
"""
return bool(self.__request_counter.get())
def end_request(self):
"""**DEPRECATED**: Undo :meth:`start_request`. If :meth:`end_request`
is called as many times as :meth:`start_request`, the request is over
and this thread's connection returns to the pool. Extra calls to
:meth:`end_request` have no effect.
Ending a request allows the :class:`~socket.socket` that has
been reserved for this thread by :meth:`start_request` to be returned to
the pool. Other threads will then be able to re-use that
:class:`~socket.socket`. If your application uses many threads, or has
long-running threads that infrequently perform MongoDB operations, then
judicious use of this method can lead to performance gains. Care should
be taken, however, to make sure that :meth:`end_request` is not called
in the middle of a sequence of operations in which ordering is
important. This could lead to unexpected results.
.. warning:: :meth:`start_request`, :meth:`in_request`,
and :meth:`end_request` are deprecated in this version of PyMongo and
removed in PyMongo 3. See the :doc:`/migrate-to-pymongo3` for more
information.
.. versionchanged:: 2.8
Deprecated end_request.
"""
rs_state = self.__rs_state
if 0 == self.__request_counter.dec():
for member in rs_state.members:
# No effect if not in a request
member.end_request()
rs_state.unpin_host()
def __eq__(self, other):
# XXX: Implement this?
return NotImplemented
def __ne__(self, other):
return NotImplemented
def __repr__(self):
return "MongoReplicaSetClient(%r)" % (["%s:%d" % n
for n in self.hosts],)
def __getattr__(self, name):
"""Get a database by name.
Raises :class:`~pymongo.errors.InvalidName` if an invalid
database name is used.
:Parameters:
- `name`: the name of the database to get
"""
return database.Database(self, name)
def __getitem__(self, name):
"""Get a database by name.
Raises :class:`~pymongo.errors.InvalidName` if an invalid
database name is used.
:Parameters:
- `name`: the name of the database to get
"""
return self.__getattr__(name)
def close_cursor(self, cursor_id, _conn_id):
"""Close a single database cursor.
Raises :class:`TypeError` if `cursor_id` is not an instance of
``(int, long)``.
:Parameters:
- `cursor_id`: id of cursor to close
"""
if not isinstance(cursor_id, (int, long)):
raise TypeError("cursor_id must be an instance of (int, long)")
member = self.__get_rs_state().get(_conn_id)
# We can't risk taking the lock to reconnect if we're being called
# from Cursor.__del__, see PYTHON-799.
if not member:
warnings.warn("not connected, couldn't close cursor",
stacklevel=2)
return
_, kill_cursors_msg = message.kill_cursors([cursor_id])
sock_info = self.__socket(member)
try:
try:
sock_info.sock.sendall(kill_cursors_msg)
except:
sock_info.close()
raise
finally:
member.maybe_return_socket(sock_info)
def server_info(self):
"""Get information about the MongoDB primary we're connected to.
"""
return self.admin.command("buildinfo",
read_preference=ReadPreference.PRIMARY)
def database_names(self):
"""Get a list of the names of all databases on the connected server.
"""
return [db["name"] for db in
self.admin.command(
"listDatabases",
read_preference=ReadPreference.PRIMARY)["databases"]]
def drop_database(self, name_or_database):
"""Drop a database.
Raises :class:`TypeError` if `name_or_database` is not an instance of
:class:`basestring` (:class:`str` in python 3) or Database
:Parameters:
- `name_or_database`: the name of a database to drop, or a
:class:`~pymongo.database.Database` instance representing the
database to drop
"""
name = name_or_database
if isinstance(name, database.Database):
name = name.name
if not isinstance(name, basestring):
raise TypeError("name_or_database must be an instance of "
"%s or Database" % (basestring.__name__,))
self._purge_index(name)
self[name].command("dropDatabase",
read_preference=ReadPreference.PRIMARY)
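    # Example (a hedged sketch; the database name is illustrative):
    #
    #     client.drop_database('test_db')
    #     # or pass the Database instance itself:
    #     client.drop_database(client.test_db)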
def copy_database(self, from_name, to_name,
from_host=None, username=None, password=None,
mechanism='DEFAULT'):
"""**DEPRECATED**: Copy a database, potentially from another host.
Raises :class:`TypeError` if `from_name` or `to_name` is not
an instance of :class:`basestring` (:class:`str` in python 3).
Raises :class:`~pymongo.errors.InvalidName` if `to_name` is
not a valid database name.
If `from_host` is ``None`` the current host is used as the
source. Otherwise the database is copied from `from_host`.
If the source database requires authentication, `username` and
`password` must be specified. By default, use SCRAM-SHA-1 with
MongoDB 3.0 and later, MONGODB-CR (MongoDB Challenge Response
protocol) for older servers.
.. warning:: :meth:`copy_database` is removed in PyMongo 3.0. See the
:doc:`copy_database examples </examples/copydb>` for alternatives.
:Parameters:
- `from_name`: the name of the source database
- `to_name`: the name of the target database
- `from_host` (optional): host name to copy from
- `username` (optional): username for source database
- `password` (optional): password for source database
- `mechanism` (optional): auth method, 'MONGODB-CR' or 'SCRAM-SHA-1'
.. seealso:: The :doc:`copy_database examples </examples/copydb>`.
.. versionchanged:: 2.8
Deprecated copy_database, and added SCRAM-SHA-1 support.
"""
member = self.__find_primary()
sock_info = self.__socket(member)
try:
helpers._copy_database(
fromdb=from_name,
todb=to_name,
fromhost=from_host,
mechanism=mechanism,
username=username,
password=password,
sock_info=sock_info,
cmd_func=self.__simple_command)
finally:
member.pool.maybe_return_socket(sock_info)
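    # Example usage of this deprecated helper (a hedged sketch; names and host
    # are illustrative):
    #
    #     client.copy_database('app_db', 'app_db_backup')
    #     client.copy_database('app_db', 'app_db', from_host='db.example.com',
    #                          username='user', password='pass')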
def get_default_database(self):
"""Get the database named in the MongoDB connection URI.
>>> uri = 'mongodb://host/my_database'
>>> client = MongoReplicaSetClient(uri)
>>> db = client.get_default_database()
>>> assert db.name == 'my_database'
Useful in scripts where you want to choose which database to use
based only on the URI in a configuration file.
"""
if self.__default_database_name is None:
raise ConfigurationError('No default database defined')
return self[self.__default_database_name]
def get_database(self, name, codec_options=None,
read_preference=None, write_concern=None):
"""Get a :class:`~pymongo.database.Database` with the given name and
options.
Useful for creating a :class:`~pymongo.database.Database` with
different codec options, read preference, and/or write concern from
this :class:`MongoClient`.
>>> from pymongo import ReadPreference
>>> client.read_preference == ReadPreference.PRIMARY
True
>>> db1 = client.test
>>> db1.read_preference == ReadPreference.PRIMARY
True
>>> db2 = client.get_database(
... 'test', read_preference=ReadPreference.SECONDARY)
>>> db2.read_preference == ReadPreference.SECONDARY
True
:Parameters:
- `name`: The name of the database - a string.
- `codec_options` (optional): An instance of
:class:`~bson.codec_options.CodecOptions`. If ``None`` (the
default) the :attr:`codec_options` of this :class:`MongoClient` is
used.
- `read_preference` (optional): The read preference to use. If
``None`` (the default) the :attr:`read_preference` of this
:class:`MongoClient` is used. See :mod:`~pymongo.read_preferences`
for options.
- `write_concern` (optional): An instance of
:class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the
default) the :attr:`write_concern` of this :class:`MongoClient` is
used.
.. versionadded:: 2.9
"""
return database.Database(
self, name, codec_options, read_preference, write_concern)
| agpl-3.0 |
Jayflux/servo | tests/wpt/web-platform-tests/tools/wptserve/tests/functional/test_server.py | 22 | 1342 | import unittest
import pytest
from six.moves.urllib.error import HTTPError
wptserve = pytest.importorskip("wptserve")
from .base import TestUsingServer
class TestFileHandler(TestUsingServer):
def test_not_handled(self):
with self.assertRaises(HTTPError) as cm:
resp = self.request("/not_existing")
self.assertEqual(cm.exception.code, 404)
class TestRewriter(TestUsingServer):
def test_rewrite(self):
@wptserve.handlers.handler
def handler(request, response):
return request.request_path
route = ("GET", "/test/rewritten", handler)
self.server.rewriter.register("GET", "/test/original", route[1])
self.server.router.register(*route)
resp = self.request("/test/original")
self.assertEqual(200, resp.getcode())
self.assertEqual("/test/rewritten", resp.read())
class TestRequestHandler(TestUsingServer):
def test_exception(self):
@wptserve.handlers.handler
def handler(request, response):
raise Exception
route = ("GET", "/test/raises", handler)
self.server.router.register(*route)
with self.assertRaises(HTTPError) as cm:
resp = self.request("/test/raises")
self.assertEqual(cm.exception.code, 500)
if __name__ == "__main__":
unittest.main()
| mpl-2.0 |
djenniex/CouchPotatoServer | libs/chardet/euckrfreq.py | 3121 | 45978 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Sampling from about 20M of text materials, including literature and computer technology
# 128 --> 0.79
# 256 --> 0.92
# 512 --> 0.986
# 1024 --> 0.99944
# 2048 --> 0.99999
#
# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
# Random Distribution Ratio = 512 / (2350-512) = 0.279.
#
# Typical Distribution Ratio
EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
EUCKR_TABLE_SIZE = 2352
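# How these constants are typically consumed (a hedged sketch of the calling
# distribution analyser, which is not part of this module): characters whose
# frequency order falls below the 512 cut-off noted above count as "frequent",
# and the ratio of frequent to non-frequent characters is scaled by the
# typical ratio to yield a confidence value, roughly:
#
#     r = freq_chars / ((total_chars - freq_chars)
#                       * EUCKR_TYPICAL_DISTRIBUTION_RATIO)
#     confidence = min(r, 0.99)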
# Char to FreqOrder table ,
EUCKRCharToFreqOrder = ( \
13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,
1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,
945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,
708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750,
1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,
1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885,
127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841,
1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910,
1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610,
269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939,
887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934,
1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7,
103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250,
423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824,
532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745,
619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61,
191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032,
2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075,
1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274,
962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721,
1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463,
2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285,
2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10,
2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350,
1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247,
1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259,
1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262,
2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273,
295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117,
432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312,
2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229,
2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315,
501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170,
1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485,
1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494,
434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544,
2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562,
2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431,
202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406,
2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256
#Everything below is of no interest for detection purpose
2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658,
2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674,
2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690,
2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704,
2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720,
2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734,
2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750,
2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765,
2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779,
2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793,
2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809,
2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824,
2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840,
2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856,
1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869,
2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883,
2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899,
2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915,
2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331,
2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945,
2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961,
2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976,
2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992,
2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008,
3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021,
3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037,
3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052,
3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066,
3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080,
3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095,
3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110,
3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124,
3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140,
3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156,
3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172,
3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187,
3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201,
3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217,
3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233,
3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248,
3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264,
3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279,
3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295,
3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311,
3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327,
3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343,
3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359,
3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374,
3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389,
3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405,
3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338,
3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432,
3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446,
3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191,
3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471,
3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486,
1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499,
1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513,
3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525,
3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541,
3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557,
3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573,
3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587,
3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603,
3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618,
3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632,
3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648,
3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663,
3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679,
3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695,
3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583,
1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722,
3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738,
3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753,
3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767,
3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782,
3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796,
3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810,
3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591,
1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836,
3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851,
3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866,
3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880,
3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895,
1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905,
3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921,
3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934,
3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603,
3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964,
3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978,
3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993,
3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009,
4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024,
4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040,
1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055,
4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069,
4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083,
4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098,
4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113,
4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610,
4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142,
4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157,
4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173,
4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189,
4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205,
4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220,
4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234,
4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249,
4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265,
4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279,
4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294,
4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310,
4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326,
4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341,
4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357,
4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371,
4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387,
4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403,
4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418,
4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432,
4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446,
4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461,
4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476,
4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491,
4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507,
4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623,
4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536,
4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551,
4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567,
4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581,
4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627,
4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611,
4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626,
4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642,
4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657,
4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672,
4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687,
1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700,
4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715,
4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731,
4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633,
4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758,
4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773,
4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788,
4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803,
4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817,
4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832,
4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847,
4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863,
4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879,
4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893,
4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909,
4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923,
4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938,
4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954,
4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970,
4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645,
4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999,
5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078,
5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028,
1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042,
5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056,
5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072,
5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087,
5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103,
5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118,
1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132,
5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,
5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161,
5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177,
5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192,
5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206,
1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218,
5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,
5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249,
5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262,
5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,
5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,
5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308,
5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323,
5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338,
5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353,
5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369,
5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385,
5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400,
5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415,
5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430,
5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445,
5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461,
5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477,
5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491,
5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507,
5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523,
5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539,
5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554,
5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570,
1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585,
5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600,
5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615,
5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,
5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,
5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,
1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673,
5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688,
5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703,
5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716,
5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729,
5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744,
1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758,
5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773,
1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786,
5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801,
5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815,
5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831,
5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847,
5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862,
5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876,
5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889,
5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,
5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,
5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687,
5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,
5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963,
5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,
5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993,
5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009,
6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025,
6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039,
6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055,
6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071,
6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086,
6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102,
6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118,
6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133,
6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147,
6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,
6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,
6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194,
6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,
6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225,
6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,
6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256,
6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271, #1024
6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287,
6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699,
6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317,
6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,
6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347,
6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,
6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,
6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,
6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411,
6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425,
6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440,
6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456,
6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,
6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488,
6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266,
6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,
6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535,
6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551,
1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565,
6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581,
6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597,
6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613,
6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629,
6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644,
1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659,
6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674,
1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689,
6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705,
6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721,
6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736,
1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748,
6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763,
6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779,
6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794,
6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711,
6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825,
6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840,
6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856,
6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872,
6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888,
6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903,
6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918,
6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934,
6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950,
6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,
6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981,
6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996,
6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011,
7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,
7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042,
7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,
7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074,
7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090,
7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106,
7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122,
7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138,
7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154,
7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170,
7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186,
7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202,
7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216,
7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232,
7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248,
7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264,
7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280,
7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296,
7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312,
7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327,
7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343,
7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359,
7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375,
7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391,
7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407,
7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423,
7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439,
7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455,
7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,
7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,
7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503,
7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,
7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,
7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551,
7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567,
7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583,
7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599,
7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615,
7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631,
7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647,
7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663,
7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679,
7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695,
7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711,
7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727,
7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743,
7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759,
7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,
7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,
7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807,
7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,
7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,
7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,
7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,
7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,
7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,
7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,
7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935,
7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951,
7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967,
7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983,
7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999,
8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,
8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031,
8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047,
8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,
8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,
8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,
8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111,
8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127,
8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,
8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,
8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,
8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,
8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,
8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,
8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,
8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,
8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,
8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,
8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,
8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,
8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,
8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,
8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,
8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,
8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,
8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,
8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,
8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,
8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,
8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,
8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,
8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,
8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,
8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,
8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,
8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,
8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,
8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,
8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,
8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,
8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,
8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,
8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,
8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,
8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,
8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,
8736,8737,8738,8739,8740,8741)
# flake8: noqa
| gpl-3.0 |
40423224/2017springcd_hw | plugin/render_math/math.py | 283 | 14202 | # -*- coding: utf-8 -*-
"""
Math Render Plugin for Pelican
==============================
This plugin allows your site to render Math. It uses
the MathJax JavaScript engine.
For markdown, the plugin works by creating a Markdown
extension which is used during the markdown compilation
stage. Math therefore gets treated like a "first class
citizen" in Pelican
For reStructuredText, the plugin instructs the rst engine
to output Mathjax for all math.
The mathjax script is by default automatically inserted
into the HTML.
Typogrify Compatibility
-----------------------
This plugin now plays nicely with Typogrify, but it
requires Typogrify version 2.07 or above.
User Settings
-------------
Users are also able to pass a dictionary of settings
in the settings file which will control how the MathJax
library renders things. This could be very useful for
template builders that want to adjust the look and feel of
the math. See README for more details.
"""
import os
import sys
from pelican import signals, generators
try:
from bs4 import BeautifulSoup
except ImportError as e:
BeautifulSoup = None
try:
from . pelican_mathjax_markdown_extension import PelicanMathJaxExtension
except ImportError as e:
PelicanMathJaxExtension = None
def process_settings(pelicanobj):
"""Sets user specified MathJax settings (see README for more details)"""
mathjax_settings = {}
# NOTE TO FUTURE DEVELOPERS: Look at the README and what is happening in
# this function if any additional changes to the mathjax settings need to
# be incorporated. Also, please inline comment what the variables
# will be used for
# Default settings
mathjax_settings['auto_insert'] = True # if set to true, it will insert mathjax script automatically into content without needing to alter the template.
mathjax_settings['align'] = 'center' # controls alignment of of displayed equations (values can be: left, right, center)
mathjax_settings['indent'] = '0em' # if above is not set to 'center', then this setting acts as an indent
mathjax_settings['show_menu'] = 'true' # controls whether to attach mathjax contextual menu
mathjax_settings['process_escapes'] = 'true' # controls whether escapes are processed
mathjax_settings['latex_preview'] = 'TeX' # controls what the user sees while waiting for LaTeX to render
mathjax_settings['color'] = 'inherit' # controls color math is rendered in
mathjax_settings['linebreak_automatic'] = 'false' # Set to false by default for performance reasons (see http://docs.mathjax.org/en/latest/output.html#automatic-line-breaking)
mathjax_settings['tex_extensions'] = '' # latex extensions that can be embedded inside mathjax (see http://docs.mathjax.org/en/latest/tex.html#tex-and-latex-extensions)
mathjax_settings['responsive'] = 'false' # Tries to make displayed math responsive
mathjax_settings['responsive_break'] = '768' # The break point (in pixels) at which math is responsively aligned
mathjax_settings['mathjax_font'] = 'default' # forces mathjax to use the specified font.
mathjax_settings['process_summary'] = BeautifulSoup is not None # will fix up summaries if math is cut off. Requires beautiful soup
mathjax_settings['force_tls'] = 'false' # will force mathjax to be served over https - if set to False, it will only use https if the site is served using https
mathjax_settings['message_style'] = 'normal' # This value controls the verbosity of the messages in the lower left-hand corner. Set it to "none" to eliminate all messages
# Source for MathJax
mathjax_settings['source'] = "'//cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML'"
# Get the user specified settings
try:
settings = pelicanobj.settings['MATH_JAX']
except:
settings = None
# If no settings have been specified, then return the defaults
if not isinstance(settings, dict):
return mathjax_settings
# The following mathjax settings can be set via the settings dictionary
for key, value in ((key, settings[key]) for key in settings):
# Iterate over dictionary in a way that is compatible with both version 2
# and 3 of python
if key == 'align':
try:
typeVal = isinstance(value, basestring)
except NameError:
typeVal = isinstance(value, str)
if not typeVal:
continue
if value == 'left' or value == 'right' or value == 'center':
mathjax_settings[key] = value
else:
mathjax_settings[key] = 'center'
if key == 'indent':
mathjax_settings[key] = value
if key == 'show_menu' and isinstance(value, bool):
mathjax_settings[key] = 'true' if value else 'false'
if key == 'message_style':
mathjax_settings[key] = value if value is not None else 'none'
if key == 'auto_insert' and isinstance(value, bool):
mathjax_settings[key] = value
if key == 'process_escapes' and isinstance(value, bool):
mathjax_settings[key] = 'true' if value else 'false'
if key == 'latex_preview':
try:
typeVal = isinstance(value, basestring)
except NameError:
typeVal = isinstance(value, str)
if not typeVal:
continue
mathjax_settings[key] = value
if key == 'color':
try:
typeVal = isinstance(value, basestring)
except NameError:
typeVal = isinstance(value, str)
if not typeVal:
continue
mathjax_settings[key] = value
if key == 'linebreak_automatic' and isinstance(value, bool):
mathjax_settings[key] = 'true' if value else 'false'
if key == 'process_summary' and isinstance(value, bool):
if value and BeautifulSoup is None:
print("BeautifulSoup4 is needed for summaries to be processed by render_math\nPlease install it")
value = False
mathjax_settings[key] = value
if key == 'responsive' and isinstance(value, bool):
mathjax_settings[key] = 'true' if value else 'false'
if key == 'force_tls' and isinstance(value, bool):
mathjax_settings[key] = 'true' if value else 'false'
if key == 'responsive_break' and isinstance(value, int):
mathjax_settings[key] = str(value)
if key == 'tex_extensions' and isinstance(value, list):
# filter string values, then add '' to them
try:
value = filter(lambda string: isinstance(string, basestring), value)
except NameError:
value = filter(lambda string: isinstance(string, str), value)
value = map(lambda string: "'%s'" % string, value)
mathjax_settings[key] = ',' + ','.join(value)
if key == 'mathjax_font':
try:
typeVal = isinstance(value, basestring)
except NameError:
typeVal = isinstance(value, str)
if not typeVal:
continue
value = value.lower()
if value == 'sanserif':
value = 'SansSerif'
elif value == 'fraktur':
value = 'Fraktur'
elif value == 'typewriter':
value = 'Typewriter'
else:
value = 'default'
mathjax_settings[key] = value
return mathjax_settings
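# Hypothetical example (not part of the original plugin) of the MATH_JAX
# dictionary a user could place in pelicanconf.py; every key shown is one of
# the settings handled above, and omitted keys keep the defaults assigned in
# process_settings():
#
#     MATH_JAX = {
#         'align': 'left',
#         'color': 'blue',
#         'responsive': True,
#         'tex_extensions': ['mhchem.js', 'cancel.js'],
#     }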
def process_summary(article):
"""Ensures summaries are not cut off. Also inserts
mathjax script so that math will be rendered"""
summary = article._get_summary()
summary_parsed = BeautifulSoup(summary, 'html.parser')
math = summary_parsed.find_all(class_='math')
if len(math) > 0:
last_math_text = math[-1].get_text()
if len(last_math_text) > 3 and last_math_text[-3:] == '...':
content_parsed = BeautifulSoup(article._content, 'html.parser')
full_text = content_parsed.find_all(class_='math')[len(math)-1].get_text()
math[-1].string = "%s ..." % full_text
summary = summary_parsed.decode()
article._summary = "%s<script type='text/javascript'>%s</script>" % (summary, process_summary.mathjax_script)
def configure_typogrify(pelicanobj, mathjax_settings):
"""Instructs Typogrify to ignore math tags - which allows Typogrify
to play nicely with math related content"""
# If Typogrify is not being used, then just exit
if not pelicanobj.settings.get('TYPOGRIFY', False):
return
try:
import typogrify
from distutils.version import LooseVersion
if LooseVersion(typogrify.__version__) < LooseVersion('2.0.7'):
raise TypeError('Incorrect version of Typogrify')
from typogrify.filters import typogrify
# At this point, we are happy to use Typogrify, meaning
# it is installed and it is a recent enough version
# that can be used to ignore all math
# Instantiate markdown extension and append it to the current extensions
pelicanobj.settings['TYPOGRIFY_IGNORE_TAGS'].extend(['.math', 'script']) # ignore math class and script
except (ImportError, TypeError) as e:
pelicanobj.settings['TYPOGRIFY'] = False # disable Typogrify
if isinstance(e, ImportError):
print("\nTypogrify is not installed, so it is being ignored.\nIf you want to use it, please install via: pip install typogrify\n")
if isinstance(e, TypeError):
print("\nA more recent version of Typogrify is needed for the render_math module.\nPlease upgrade Typogrify to the latest version (anything equal or above version 2.0.7 is okay).\nTypogrify will be turned off due to this reason.\n")
def process_mathjax_script(mathjax_settings):
"""Load the mathjax script template from file, and render with the settings"""
# Read the mathjax javascript template from file
with open (os.path.dirname(os.path.realpath(__file__))
+ '/mathjax_script_template', 'r') as mathjax_script_template:
mathjax_template = mathjax_script_template.read()
return mathjax_template.format(**mathjax_settings)
def mathjax_for_markdown(pelicanobj, mathjax_script, mathjax_settings):
"""Instantiates a customized markdown extension for handling mathjax
related content"""
# Create the configuration for the markdown template
config = {}
config['mathjax_script'] = mathjax_script
config['math_tag_class'] = 'math'
config['auto_insert'] = mathjax_settings['auto_insert']
# Instantiate markdown extension and append it to the current extensions
try:
pelicanobj.settings['MD_EXTENSIONS'].append(PelicanMathJaxExtension(config))
except:
sys.excepthook(*sys.exc_info())
sys.stderr.write("\nError - the pelican mathjax markdown extension failed to configure. MathJax is non-functional.\n")
sys.stderr.flush()
def mathjax_for_rst(pelicanobj, mathjax_script):
"""Setup math for RST"""
docutils_settings = pelicanobj.settings.get('DOCUTILS_SETTINGS', {})
docutils_settings['math_output'] = 'MathJax'
pelicanobj.settings['DOCUTILS_SETTINGS'] = docutils_settings
rst_add_mathjax.mathjax_script = mathjax_script
def pelican_init(pelicanobj):
"""
Loads the mathjax script according to the settings.
Instantiate the Python markdown extension, passing in the mathjax
script as config parameter.
"""
# Process settings, and set global var
mathjax_settings = process_settings(pelicanobj)
# Generate mathjax script
mathjax_script = process_mathjax_script(mathjax_settings)
# Configure Typogrify
configure_typogrify(pelicanobj, mathjax_settings)
# Configure Mathjax For Markdown
if PelicanMathJaxExtension:
mathjax_for_markdown(pelicanobj, mathjax_script, mathjax_settings)
# Configure Mathjax For RST
mathjax_for_rst(pelicanobj, mathjax_script)
# Set process_summary's mathjax_script variable
process_summary.mathjax_script = None
if mathjax_settings['process_summary']:
process_summary.mathjax_script = mathjax_script
def rst_add_mathjax(content):
"""Adds mathjax script for reStructuredText"""
# .rst is the only valid extension for reStructuredText files
_, ext = os.path.splitext(os.path.basename(content.source_path))
if ext != '.rst':
return
# If math class is present in text, add the javascript
# note that RST hardwires mathjax to be class "math"
if 'class="math"' in content._content:
content._content += "<script type='text/javascript'>%s</script>" % rst_add_mathjax.mathjax_script
def process_rst_and_summaries(content_generators):
"""
Ensure mathjax script is applied to RST and summaries are
corrected if specified in user settings.
Handles content attached to ArticleGenerator and PageGenerator objects,
since the plugin doesn't know how to handle other Generator types.
For reStructuredText content, examine both articles and pages.
If article or page is reStructuredText and there is math present,
append the mathjax script.
Also process summaries if present (only applies to articles)
and user wants summaries processed (via user settings)
"""
for generator in content_generators:
if isinstance(generator, generators.ArticlesGenerator):
for article in generator.articles + generator.translations:
rst_add_mathjax(article)
# Optionally fix truncated formulae in summaries.
if process_summary.mathjax_script is not None:
process_summary(article)
elif isinstance(generator, generators.PagesGenerator):
for page in generator.pages:
rst_add_mathjax(page)
def register():
"""Plugin registration"""
signals.initialized.connect(pelican_init)
signals.all_generators_finalized.connect(process_rst_and_summaries)
| agpl-3.0 |
dulems/hue | desktop/core/src/desktop/lib/test_utils.py | 17 | 2226 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from lxml import objectify, etree
from django.contrib.auth.models import Group, User
from useradmin.models import HuePermission, GroupPermission, get_default_user_group
def grant_access(username, groupname, appname):
add_permission(username, groupname, 'access', appname)
def add_permission(username, groupname, permname, appname):
user = User.objects.get(username=username)
group, created = Group.objects.get_or_create(name=groupname)
perm, created = HuePermission.objects.get_or_create(app=appname, action=permname)
GroupPermission.objects.get_or_create(group=group, hue_permission=perm)
if not user.groups.filter(name=group.name).exists():
user.groups.add(group)
user.save()
def add_to_group(username, groupname=get_default_user_group().name):
user = User.objects.get(username=username)
group, created = Group.objects.get_or_create(name=groupname)
if not user.groups.filter(name=group.name).exists():
user.groups.add(group)
user.save()
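# Illustrative usage sketch (not part of the original module): a test would
# typically call these helpers before exercising a view, e.g.
#
#     grant_access('test_user', 'test_group', 'beeswax')  # app name is an example
#     add_to_group('test_user')
#
# Both helpers create the Group and HuePermission rows on demand, so no extra
# fixtures are needed beyond the User itself.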
def reformat_json(json_obj):
if isinstance(json_obj, basestring):
return json.dumps(json.loads(json_obj))
else:
return json.dumps(json_obj)
def reformat_xml(xml_obj):
if isinstance(xml_obj, basestring):
return etree.tostring(objectify.fromstring(xml_obj, etree.XMLParser(strip_cdata=False, remove_blank_text=True)))
else:
return etree.tostring(xml_obj)
| apache-2.0 |
matthiasdiener/spack | var/spack/repos/builtin/packages/transposome/package.py | 5 | 1624 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Transposome(PerlPackage):
"""A toolkit for annotation of transposable element families from
unassembled sequence reads."""
homepage = "https://sestaton.github.io/Transposome/"
url = "https://github.com/sestaton/Transposome/archive/v0.11.2.tar.gz"
version('0.11.2', '157c1fc090b0aa30050d03df885dcde0')
depends_on('blast-plus')
| lgpl-2.1 |
fidodaj/info3180lab4 | server/lib/flask/wrappers.py | 773 | 6709 | # -*- coding: utf-8 -*-
"""
flask.wrappers
~~~~~~~~~~~~~~
Implements the WSGI wrappers (request and response).
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from werkzeug.wrappers import Request as RequestBase, Response as ResponseBase
from werkzeug.exceptions import BadRequest
from .debughelpers import attach_enctype_error_multidict
from . import json
from .globals import _request_ctx_stack
_missing = object()
def _get_data(req, cache):
getter = getattr(req, 'get_data', None)
if getter is not None:
return getter(cache=cache)
return req.data
class Request(RequestBase):
"""The request object used by default in Flask. Remembers the
matched endpoint and view arguments.
It is what ends up as :class:`~flask.request`. If you want to replace
the request object used you can subclass this and set
:attr:`~flask.Flask.request_class` to your subclass.
The request object is a :class:`~werkzeug.wrappers.Request` subclass and
provides all of the attributes Werkzeug defines plus a few Flask
specific ones.
"""
#: the internal URL rule that matched the request. This can be
#: useful to inspect which methods are allowed for the URL from
#: a before/after handler (``request.url_rule.methods``) etc.
#:
#: .. versionadded:: 0.6
url_rule = None
#: a dict of view arguments that matched the request. If an exception
#: happened when matching, this will be `None`.
view_args = None
#: if matching the URL failed, this is the exception that will be
#: raised / was raised as part of the request handling. This is
#: usually a :exc:`~werkzeug.exceptions.NotFound` exception or
#: something similar.
routing_exception = None
# switched by the request context until 1.0 to opt in deprecated
# module functionality
_is_old_module = False
@property
def max_content_length(self):
"""Read-only view of the `MAX_CONTENT_LENGTH` config key."""
ctx = _request_ctx_stack.top
if ctx is not None:
return ctx.app.config['MAX_CONTENT_LENGTH']
@property
def endpoint(self):
"""The endpoint that matched the request. This in combination with
:attr:`view_args` can be used to reconstruct the same or a
modified URL. If an exception happened when matching, this will
be `None`.
"""
if self.url_rule is not None:
return self.url_rule.endpoint
@property
def module(self):
"""The name of the current module if the request was dispatched
to an actual module. This is deprecated functionality, use blueprints
instead.
"""
from warnings import warn
warn(DeprecationWarning('modules were deprecated in favor of '
'blueprints. Use request.blueprint '
'instead.'), stacklevel=2)
if self._is_old_module:
return self.blueprint
@property
def blueprint(self):
"""The name of the current blueprint"""
if self.url_rule and '.' in self.url_rule.endpoint:
return self.url_rule.endpoint.rsplit('.', 1)[0]
@property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data. Otherwise this will be `None`.
The :meth:`get_json` method should be used instead.
"""
# XXX: deprecate property
return self.get_json()
def get_json(self, force=False, silent=False, cache=True):
"""Parses the incoming JSON request data and returns it. If
parsing fails the :meth:`on_json_loading_failed` method on the
request object will be invoked. By default this function will
only load the json data if the mimetype is ``application/json``
but this can be overridden by the `force` parameter.
:param force: if set to `True` the mimetype is ignored.
:param silent: if set to `True` this method will fail silently
and return `None`.
:param cache: if set to `True` the parsed JSON data is remembered
on the request.
"""
rv = getattr(self, '_cached_json', _missing)
if rv is not _missing:
return rv
if self.mimetype != 'application/json' and not force:
return None
# We accept a request charset against the specification as
# certain clients have been using this in the past. This
# fits our general approach of being nice in what we accept
# and strict in what we send out.
request_charset = self.mimetype_params.get('charset')
try:
data = _get_data(self, cache)
if request_charset is not None:
rv = json.loads(data, encoding=request_charset)
else:
rv = json.loads(data)
except ValueError as e:
if silent:
rv = None
else:
rv = self.on_json_loading_failed(e)
if cache:
self._cached_json = rv
return rv
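# Illustrative only (not part of Flask): a view would typically call this as
#
#     data = request.get_json(silent=True)
#     if data is None:
#         abort(400)
#
# where silent=True turns a parse failure into a None return instead of the
# BadRequest raised by on_json_loading_failed().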
def on_json_loading_failed(self, e):
"""Called if decoding of the JSON data failed. The return value of
this method is used by :meth:`get_json` when an error occurs. The
default implementation just raises a :class:`BadRequest` exception.
.. versionchanged:: 0.10
Removed buggy previous behavior of generating a random JSON
response. If you want that behavior back you can trivially
add it by subclassing.
.. versionadded:: 0.8
"""
raise BadRequest()
def _load_form_data(self):
RequestBase._load_form_data(self)
# in debug mode we're replacing the files multidict with an ad-hoc
# subclass that raises a different error for key errors.
ctx = _request_ctx_stack.top
if ctx is not None and ctx.app.debug and \
self.mimetype != 'multipart/form-data' and not self.files:
attach_enctype_error_multidict(self)
class Response(ResponseBase):
"""The response object that is used by default in Flask. Works like the
response object from Werkzeug but is set to have an HTML mimetype by
default. Quite often you don't have to create this object yourself because
:meth:`~flask.Flask.make_response` will take care of that for you.
If you want to replace the response object used you can subclass this and
set :attr:`~flask.Flask.response_class` to your subclass.
"""
default_mimetype = 'text/html'
| apache-2.0 |
odubno/microblog | venv/lib/python2.7/site-packages/coverage/bytecode.py | 209 | 2036 | """Bytecode manipulation for coverage.py"""
import opcode, types
from coverage.backward import byte_to_int
class ByteCode(object):
"""A single bytecode."""
def __init__(self):
# The offset of this bytecode in the code object.
self.offset = -1
# The opcode, defined in the `opcode` module.
self.op = -1
# The argument, a small integer, whose meaning depends on the opcode.
self.arg = -1
# The offset in the code object of the next bytecode.
self.next_offset = -1
# The offset to jump to.
self.jump_to = -1
class ByteCodes(object):
"""Iterator over byte codes in `code`.
Returns `ByteCode` objects.
"""
# pylint: disable=R0924
def __init__(self, code):
self.code = code
def __getitem__(self, i):
return byte_to_int(self.code[i])
def __iter__(self):
offset = 0
while offset < len(self.code):
bc = ByteCode()
bc.op = self[offset]
bc.offset = offset
next_offset = offset+1
if bc.op >= opcode.HAVE_ARGUMENT:
bc.arg = self[offset+1] + 256*self[offset+2]
next_offset += 2
label = -1
if bc.op in opcode.hasjrel:
label = next_offset + bc.arg
elif bc.op in opcode.hasjabs:
label = bc.arg
bc.jump_to = label
bc.next_offset = offset = next_offset
yield bc
class CodeObjects(object):
"""Iterate over all the code objects in `code`."""
def __init__(self, code):
self.stack = [code]
def __iter__(self):
while self.stack:
# We're going to return the code object on the stack, but first
# push its children for later returning.
code = self.stack.pop()
for c in code.co_consts:
if isinstance(c, types.CodeType):
self.stack.append(c)
yield code
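# Illustrative sketch (not part of coverage.py): walking every bytecode of a
# compiled module combines the two iterators above, e.g.
#
#     code = compile(open("somefile.py").read(), "somefile.py", "exec")
#     for code_obj in CodeObjects(code):
#         for bc in ByteCodes(code_obj.co_code):
#             print(bc.offset, bc.op, bc.arg, bc.jump_to)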
| bsd-3-clause |
crunchmail/munch-core | src/munch/apps/campaigns/tools.py | 1 | 3004 | import urllib
from django.urls import get_resolver
from django.urls import NoReverseMatch
def resolve_url(request, path=None):
"""Helper function that reports information on the request's url.
Taken from http://code.google.com/p/greatlemers-django-tools \
/source/browse/trunk/gdt_nav/models.py#158
Apache License 2.0
This utility function takes a request and analyses its url to generate the
url_name and keyword arguments that can be used to generate the url via
the reverse function or url tag.
The url resolver doesn't return the name of the url that produces the
given url so some hunting around has to be done to determine what exactly
it should be.
Keyword arguments:
request -- The request object for the view that wants to generate some
menus.
path -- The relative path (default: gets it from the request.path_info)
Returns:
A tuple of (url, url_name, url_kwargs)
url -- The absolute representation of the requested url
url_name -- The 'reversable' name of the requested url
url_kwargs -- The keyword arguments that would be needed in order to
'reverse' the url.
"""
# Start by fetching the path from the request and using it to build
# the full url.
if not path:
path = request.path_info
url = request.build_absolute_uri(path)
# make sure path is only the local path
path = urllib.parse.urlparse(path).path
# The url resolver which will generate some of the url info.
# Get urlconf from request object if available.
urlconf = getattr(request, "urlconf", None)
resolver = get_resolver(urlconf)
# Pull out the view function, and the url arguments and keywords.
view_func, url_args, url_kwargs = resolver.resolve(path)
# Fetch a list of all the signatures of the items that can be reversed
# to produce the view function.
sigs = resolver.reverse_dict.getlist(view_func)
url_name = None
# Loop through all the items in the reverse dictionary.
for key, value in resolver.reverse_dict.items():
# Check if the value of the mapping is one of our matching signatures
# and that the key is a string.
if value in sigs and type(key) == str:
try:
# See if we have the right parameters to use this reversal and
# that it produces the correct url.
if resolver.reverse(key, *url_args, **url_kwargs) == path[1:]:
# No exceptions were thrown so we have the right parameters
# and the path matched therefore we've found the url name
# we were seeking - which of course means we can
# stop looking.
url_name = key
break
except NoReverseMatch:
# The parameters were wrong - ah well, maybe the next one will
# succeed.
pass
return url, url_name, url_kwargs
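# Hypothetical example of the return value (URL pattern and names are
# illustrative): for a request to /campaigns/42/ matched by a pattern named
# 'campaign-detail', resolve_url(request) would yield something like
#
#     ('https://example.org/campaigns/42/', 'campaign-detail', {'pk': '42'})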
| agpl-3.0 |
vaygr/ansible | lib/ansible/utils/module_docs_fragments/gcp.py | 23 | 1712 | # Copyright: (c) 2018, Google Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
# GCP doc fragment.
DOCUMENTATION = '''
options:
state:
description:
- Whether the given zone should or should not be present.
required: true
choices: ["present", "absent"]
default: "present"
project:
description:
- The Google Cloud Platform project to use.
auth_kind:
description:
- The type of credential used.
required: true
choices: ["machineaccount", "serviceaccount", "application"]
service_account_file:
description:
- The path of a Service Account JSON file if serviceaccount is selected as type.
service_account_email:
description:
- An optional service account email address if machineaccount is selected
and the user does not wish to use the default email.
scopes:
description:
- Array of scopes to be used.
required: true
notes:
- For authentication, you can set service_account_file using the
C(GCP_SERVICE_ACCOUNT_FILE) env variable.
- For authentication, you can set service_account_email using the
C(GCP_SERVICE_ACCOUNT_EMAIL) env variable.
- For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env
variable.
- For authentication, you can set scopes using the C(GCP_SCOPES) env variable.
- Environment variables values will only be used if the playbook values are
not set.
- The I(service_account_email) and I(service_account_file) options are
mutually exclusive.
'''
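# Illustrative note (assumption, not part of the fragment): the environment
# variables listed in the notes above let a play omit the auth options, e.g.
#
#   GCP_AUTH_KIND=serviceaccount \
#   GCP_SERVICE_ACCOUNT_FILE=/path/to/sa.json \
#   GCP_SCOPES=https://www.googleapis.com/auth/compute \
#   ansible-playbook site.yml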
| gpl-3.0 |
adminneyk/codificacionproyectando | application/views/Generacion/Generacion/lib/openoffice/openoffice.org/basis3.4/program/uno.py | 2 | 12731 | #**************************************************************
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#**************************************************************
import sys
import pyuno
import __builtin__
import socket # since on Windows sal3.dll no longer calls WSAStartup
# all functions and variables starting with an underscore (_) must be considered private
# and can be changed at any time. Don't use them
_g_ctx = pyuno.getComponentContext( )
_g_delegatee = __builtin__.__dict__["__import__"]
def getComponentContext():
""" returns the UNO component context, that was used to initialize the python runtime.
"""
return _g_ctx
def getConstantByName( constant ):
"Looks up the value of a idl constant by giving its explicit name"
return pyuno.getConstantByName( constant )
def getTypeByName( typeName):
""" returns a uno.Type instance of the type given by typeName. In case the
type does not exist, a com.sun.star.uno.RuntimeException is raised.
"""
return pyuno.getTypeByName( typeName )
def createUnoStruct( typeName, *args ):
"""creates a uno struct or exception given by typeName. The parameter args may
1) be empty. In this case, you get a default constructed uno structure.
( e.g. createUnoStruct( "com.sun.star.uno.Exception" ) )
2) be a sequence with exactly one element, that contains an instance of typeName.
In this case, a copy constructed instance of typeName is returned
( e.g. createUnoStruct( "com.sun.star.uno.Exception" , e ) )
3) be a sequence, where the length of the sequence must match the number of
elements within typeName (e.g.
createUnoStruct( "com.sun.star.uno.Exception", "foo error" , self) ). The
           elements within the sequence must match the type of each struct element,
otherwise an exception is thrown.
"""
return getClass(typeName)( *args )
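# Illustrative examples (sketch, not part of the original module) for the
# calling patterns described in the docstring above:
#
#     pv = createUnoStruct("com.sun.star.beans.PropertyValue")        # default constructed
#     pv.Name, pv.Value = "Hidden", True
#     copy = createUnoStruct("com.sun.star.beans.PropertyValue", pv)  # copy constructed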
def getClass( typeName ):
"""returns the class of a concrete uno exception, struct or interface
"""
return pyuno.getClass(typeName)
def isInterface( obj ):
"""returns true, when obj is a class of a uno interface"""
return pyuno.isInterface( obj )
def generateUuid():
"returns a 16 byte sequence containing a newly generated uuid or guid, see rtl/uuid.h "
return pyuno.generateUuid()
def systemPathToFileUrl( systemPath ):
"returns a file-url for the given system path"
return pyuno.systemPathToFileUrl( systemPath )
def fileUrlToSystemPath( url ):
"returns a system path (determined by the system, the python interpreter is running on)"
return pyuno.fileUrlToSystemPath( url )
def absolutize( path, relativeUrl ):
"returns an absolute file url from the given urls"
return pyuno.absolutize( path, relativeUrl )
def getCurrentContext():
"""Returns the currently valid current context.
see http://udk.openoffice.org/common/man/concept/uno_contexts.html#current_context
for an explanation on the current context concept
"""
return pyuno.getCurrentContext()
def setCurrentContext( newContext ):
"""Sets newContext as new uno current context. The newContext must
    implement the XCurrentContext interface. The implementation should
handle the desired properties and delegate unknown properties to the
old context. Ensure to reset the old one when you leave your stack ...
see http://udk.openoffice.org/common/man/concept/uno_contexts.html#current_context
"""
return pyuno.setCurrentContext( newContext )
class Enum:
"Represents a UNO idl enum, use an instance of this class to explicitly pass a boolean to UNO"
#typeName the name of the enum as a string
#value the actual value of this enum as a string
def __init__(self,typeName, value):
self.typeName = typeName
self.value = value
pyuno.checkEnum( self )
def __repr__(self):
return "<uno.Enum %s (%r)>" % (self.typeName, self.value)
def __eq__(self, that):
if not isinstance(that, Enum):
return False
return (self.typeName == that.typeName) and (self.value == that.value)
class Type:
"Represents a UNO type, use an instance of this class to explicitly pass a boolean to UNO"
# typeName # Name of the UNO type
# typeClass # python Enum of TypeClass, see com/sun/star/uno/TypeClass.idl
def __init__(self, typeName, typeClass):
self.typeName = typeName
self.typeClass = typeClass
pyuno.checkType(self)
def __repr__(self):
return "<Type instance %s (%r)>" % (self.typeName, self.typeClass)
def __eq__(self, that):
if not isinstance(that, Type):
return False
return self.typeClass == that.typeClass and self.typeName == that.typeName
def __hash__(self):
return self.typeName.__hash__()
class Bool(object):
"""Represents a UNO boolean, use an instance of this class to explicitly
pass a boolean to UNO.
Note: This class is deprecated. Use python's True and False directly instead
"""
def __new__(cls, value):
if isinstance(value, (str, unicode)) and value == "true":
return True
if isinstance(value, (str, unicode)) and value == "false":
return False
if value:
return True
return False
class Char:
"Represents a UNO char, use an instance of this class to explicitly pass a char to UNO"
# @param value pass a Unicode string with length 1
def __init__(self,value):
assert isinstance(value, unicode)
assert len(value) == 1
self.value=value
def __repr__(self):
return "<Char instance %s>" % (self.value, )
def __eq__(self, that):
if isinstance(that, (str, unicode)):
if len(that) > 1:
return False
return self.value == that[0]
if isinstance(that, Char):
return self.value == that.value
return False
# Suggested by Christian, but still some open problems which need to be solved first
#
#class ByteSequence(str):
#
# def __repr__(self):
# return "<ByteSequence instance %s>" % str.__repr__(self)
# for a little bit of compatibility; setting value is not possible as
# strings are immutable
# def _get_value(self):
# return self
#
# value = property(_get_value)
class ByteSequence:
def __init__(self, value):
if isinstance(value, str):
self.value = value
elif isinstance(value, ByteSequence):
self.value = value.value
else:
raise TypeError("expected string or bytesequence")
def __repr__(self):
return "<ByteSequence instance '%s'>" % (self.value, )
def __eq__(self, that):
if isinstance( that, ByteSequence):
return self.value == that.value
if isinstance(that, str):
return self.value == that
return False
def __len__(self):
return len(self.value)
def __getitem__(self, index):
return self.value[index]
def __iter__( self ):
return self.value.__iter__()
def __add__( self , b ):
if isinstance( b, str ):
return ByteSequence( self.value + b )
elif isinstance( b, ByteSequence ):
return ByteSequence( self.value + b.value )
raise TypeError( "expected string or ByteSequence as operand" )
def __hash__( self ):
        return hash(self.value)
class Any:
"use only in connection with uno.invoke() to pass an explicit typed any"
def __init__(self, type, value ):
if isinstance( type, Type ):
self.type = type
else:
self.type = getTypeByName( type )
self.value = value
def invoke( object, methodname, argTuple ):
"use this function to pass exactly typed anys to the callee (using uno.Any)"
return pyuno.invoke( object, methodname, argTuple )
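# Illustrative sketch (assumption, not part of the original module): a client
# passing an explicitly typed any, e.g. a byte sequence to an XOutputStream.
#
#     import uno
#     data = uno.Any("[]byte", uno.ByteSequence("raw bytes"))
#     uno.invoke(output_stream, "writeBytes", (data,))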
#---------------------------------------------------------------------------------------
# don't use any functions beyond this point, private section, likely to change
#---------------------------------------------------------------------------------------
#def _uno_import( name, globals={}, locals={}, fromlist=[], level=-1 ):
def _uno_import( name, *optargs, **kwargs ):
try:
# print "optargs = " + repr(optargs)
return _g_delegatee( name, *optargs, **kwargs )
except ImportError:
# process optargs
globals, locals, fromlist = list(optargs)[:3] + [kwargs.get('globals',{}), kwargs.get('locals',{}), kwargs.get('fromlist',[])][len(optargs):]
if not fromlist:
raise
modnames = name.split( "." )
mod = None
d = sys.modules
for x in modnames:
if d.has_key(x):
mod = d[x]
else:
mod = pyuno.__class__(x) # How to create a module ??
d = mod.__dict__
RuntimeException = pyuno.getClass( "com.sun.star.uno.RuntimeException" )
for x in fromlist:
if not d.has_key(x):
if x.startswith( "typeOf" ):
try:
d[x] = pyuno.getTypeByName( name + "." + x[6:len(x)] )
except RuntimeException,e:
raise ImportError( "type " + name + "." + x[6:len(x)] +" is unknown" )
else:
try:
# check for structs, exceptions or interfaces
d[x] = pyuno.getClass( name + "." + x )
except RuntimeException,e:
# check for enums
try:
d[x] = Enum( name , x )
except RuntimeException,e2:
# check for constants
try:
d[x] = getConstantByName( name + "." + x )
except RuntimeException,e3:
# no known uno type !
raise ImportError( "type "+ name + "." +x + " is unknown" )
return mod
# hook into the __import__ chain
__builtin__.__dict__["__import__"] = _uno_import
# private function, don't use
def _impl_extractName(name):
r = range (len(name)-1,0,-1)
for i in r:
if name[i] == ".":
name = name[i+1:len(name)]
break
return name
# private, referenced from the pyuno shared library
def _uno_struct__init__(self,*args):
if len(args) == 1 and hasattr(args[0], "__class__") and args[0].__class__ == self.__class__ :
self.__dict__["value"] = args[0]
else:
self.__dict__["value"] = pyuno._createUnoStructHelper(self.__class__.__pyunostruct__,args)
# private, referenced from the pyuno shared library
def _uno_struct__getattr__(self,name):
return __builtin__.getattr(self.__dict__["value"],name)
# private, referenced from the pyuno shared library
def _uno_struct__setattr__(self,name,value):
return __builtin__.setattr(self.__dict__["value"],name,value)
# private, referenced from the pyuno shared library
def _uno_struct__repr__(self):
return repr(self.__dict__["value"])
def _uno_struct__str__(self):
return str(self.__dict__["value"])
# private, referenced from the pyuno shared library
def _uno_struct__eq__(self,cmp):
if hasattr(cmp,"value"):
return self.__dict__["value"] == cmp.__dict__["value"]
return False
# referenced from pyuno shared lib and pythonscript.py
def _uno_extract_printable_stacktrace( trace ):
mod = None
try:
mod = __import__("traceback")
except ImportError,e:
pass
ret = ""
if mod:
lst = mod.extract_tb( trace )
max = len(lst)
for j in range(max):
i = lst[max-j-1]
ret = ret + " " + str(i[0]) + ":" + \
str(i[1]) + " in function " + \
str(i[2]) + "() [" + str(i[3]) + "]\n"
else:
ret = "Couldn't import traceback module"
return ret
| mit |
joernhees/git-hg-remote-bug_gae-init | main/lib/flaskext/wtf/recaptcha/widgets.py | 16 | 2645 | """
Custom widgets
"""
try:
import json
except ImportError:
import simplejson as json
from flask import current_app
from werkzeug import url_encode
# use flaskext.babel for translations, if available
try:
from flaskext.babel import gettext as _
except ImportError:
_ = lambda(s) : s
RECAPTCHA_API_SERVER = 'http://api.recaptcha.net/'
RECAPTCHA_SSL_API_SERVER = 'https://www.google.com/recaptcha/api/'
RECAPTCHA_HTML = u'''
<script type="text/javascript">var RecaptchaOptions = %(options)s;</script>
<script type="text/javascript" src="%(script_url)s"></script>
<noscript>
<div><iframe src="%(frame_url)s" height="300" width="500"></iframe></div>
<div><textarea name="recaptcha_challenge_field" rows="3" cols="40"></textarea>
<input type="hidden" name="recaptcha_response_field" value="manual_challenge"></div>
</noscript>
'''
__all__ = ["RecaptchaWidget"]
class RecaptchaWidget(object):
def recaptcha_html(self, server, query, options):
return RECAPTCHA_HTML % dict(
script_url='%schallenge?%s' % (server, query),
frame_url='%snoscript?%s' % (server, query),
options=json.dumps(options)
)
def __call__(self, field, error=None, **kwargs):
"""Returns the recaptcha input HTML."""
if current_app.config.get('RECAPTCHA_USE_SSL', False):
server = RECAPTCHA_SSL_API_SERVER
else:
server = RECAPTCHA_API_SERVER
try:
public_key = current_app.config['RECAPTCHA_PUBLIC_KEY']
except KeyError:
raise RuntimeError, "RECAPTCHA_PUBLIC_KEY config not set"
query_options = dict(k=public_key)
if field.recaptcha_error is not None:
query_options['error'] = unicode(field.recaptcha_error)
query = url_encode(query_options)
options = {
'theme': 'clean',
'custom_translations': {
'visual_challenge': _('Get a visual challenge'),
'audio_challenge': _('Get an audio challenge'),
'refresh_btn': _('Get a new challenge'),
'instructions_visual': _('Type the two words:'),
'instructions_audio': _('Type what you hear:'),
'help_btn': _('Help'),
'play_again': _('Play sound again'),
'cant_hear_this': _('Download sound as MP3'),
'incorrect_try_again': _('Incorrect. Try again.'),
}
}
options.update(current_app.config.get('RECAPTCHA_OPTIONS', {}))
return self.recaptcha_html(server, query, options)
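# Illustrative usage sketch (assumption, not part of the original module): the
# widget is normally used indirectly through a RecaptchaField on a form, e.g.
#
#     from flaskext.wtf import Form, RecaptchaField
#
#     class CommentForm(Form):
#         recaptcha = RecaptchaField()   # rendered by RecaptchaWidget()
#
# RECAPTCHA_PUBLIC_KEY (and optionally RECAPTCHA_USE_SSL / RECAPTCHA_OPTIONS)
# must be present in the Flask application config when the field is rendered.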
| mit |
nharraud/b2share | invenio/modules/formatter/manage.py | 13 | 10738 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Perform template migration operations.
Migrate output formats and output templates found in
``CFG_BIBFORMAT_OUTPUTS_PATH`` and ``CFG_BIBFORMAT_TEMPLATES_PATH``
respectively. It creates backup of each output format with name
``<FORMAT>_legacy.bfo`` and generates new Jinja2 templates in
``CFG_BIBFORMAT_JINJA_TEMPLATE_PATH``.
"""
from __future__ import print_function
import os
import re
import shutil
from six import iteritems
from invenio.ext.script import Manager
manager = Manager(usage="Perform template migration operations.")
@manager.option('--rewrite-existing-templates',
dest='rewrite_existing_templates',
action='store_true', default=False)
@manager.option('-t', '--template',
dest='only_template_re', default=None,
help="only templates matching regular expression")
@manager.option('--verbose', dest='verbose')
def bft2tpl(rewrite_existing_templates=False, only_template_re=None,
verbose=0):
"""Convert *bft* templates to Jinja2 *tpl* templates."""
# Import all invenio modules inside to avoid side-efects ouside
# Flask application context.
from invenio.modules.formatter.config import CFG_BIBFORMAT_OUTPUTS_PATH, \
CFG_BIBFORMAT_FORMAT_OUTPUT_EXTENSION, \
CFG_BIBFORMAT_FORMAT_TEMPLATE_EXTENSION, \
CFG_BIBFORMAT_FORMAT_JINJA_TEMPLATE_EXTENSION, \
CFG_BIBFORMAT_JINJA_TEMPLATE_PATH
from invenio.modules.formatter.engine import get_format_element, \
get_output_formats, \
pattern_function_params, \
pattern_tag, pattern_lang, \
translation_pattern, \
ln_pattern, get_format_templates
from invenio.legacy.bibformat.adminlib import \
update_output_format_rules
only_template = re.compile(only_template_re) \
if only_template_re is not None else None
def rename_template(template):
if template[-3:] == CFG_BIBFORMAT_FORMAT_TEMPLATE_EXTENSION and \
(only_template is None or only_template.match(template)):
return template[:-3] + \
CFG_BIBFORMAT_FORMAT_JINJA_TEMPLATE_EXTENSION
return template
def update_rule(rule):
rule['template'] = rename_template(rule['template'])
print(' ...', rule['template'], 'to', end=' ')
print(rename_template(rule['template']))
print(' ', rule)
return rule
def eval_format_template_elements(format_template, bfo, verbose=0):
def insert_element_code(match):
error = []
function_name = match.group("function_name")
try:
format_element = get_format_element(function_name, verbose)
            except Exception:
                format_element = None
                error.append('Invalid function name %s' % (function_name, ))
params_str = []
if format_element is not None:
params = {}
# Look for function parameters given in format template code
all_params = match.group('params')
if all_params is not None:
function_params_iterator = pattern_function_params.\
finditer(all_params)
for param_match in function_params_iterator:
sep = param_match.group('sep')
name = param_match.group('param')
value = param_match.group('value')
params[name] = value
params_str.append(name + '=' + sep + value + sep)
# Replace element with function call with params.
result = '{{ bfe_%s(bfo, %s) }}' % (function_name.lower(),
', '.join(params_str))
return result
            print('\n'.join(error))
            # fall back to the original tag text so re.sub() gets a string back
            return match.group(0)
# Substitute special tags in the format by our own text.
# Special tags have the form
# <BFE_format_element_name [param="value"]* />
format = pattern_tag.sub(insert_element_code, format_template)
return format
def translate(match):
"""Translate matching values."""
word = match.group("word")
translated_word = '{{ _("' + word + '") }}'
return translated_word
def filter_languages(format_template):
"""Filter languages in format template."""
def search_lang_tag(match):
"""Searche for the <lang>...</lang> tag."""
ln_tags = {}
def clean_language_tag(match):
"""Return tag text content.
It contains if statement block to match output language.
It is called by substitution in 'filter_languages(...)'.
@param match: a match object corresponding to the special tag
that must be interpreted
"""
ln_tags[match.group(1)] = match.group(2)
return '{% if g.ln == "' + match.group(1) + '" %}' + \
match.group(2) + '{% endif %}'
# End of clean_language_tag
lang_tag_content = match.group("langs")
return '{% lang %}' + lang_tag_content + '{% endlang %}'
cleaned_lang_tag = ln_pattern.sub(clean_language_tag,
lang_tag_content)
            # FIXME no translation for current language
# if len(ln_tags) > 0:
# cleaned_lang_tag += '{% if not g.ln in ["' + \
# '", "'.join(ln_tags.keys()) + '"] %}' + \
# ln_tags.get(CFG_SITE_LANG, '') + '{% endif %}'
return cleaned_lang_tag
# End of search_lang_tag
filtered_format_template = pattern_lang.sub(search_lang_tag,
format_template)
return filtered_format_template
skip_templates = lambda (name, key): name[-3:] != 'xsl'
format_templates = filter(skip_templates,
iteritems(get_format_templates(True)))
print('>>> Going to migrate %d format template(s) ...' % (
len(format_templates), ))
if not os.path.exists(CFG_BIBFORMAT_JINJA_TEMPLATE_PATH):
os.makedirs(CFG_BIBFORMAT_JINJA_TEMPLATE_PATH)
for name, template in format_templates:
if not (only_template is None or only_template.match(name)):
continue
new_name = os.path.join(CFG_BIBFORMAT_JINJA_TEMPLATE_PATH,
rename_template(name))
if os.path.exists(new_name):
print(' [!] File', new_name, 'already exists.', end=' ')
if not rewrite_existing_templates:
print('Skipped.')
continue
else:
shutil.copy2(new_name, new_name + '.backup')
print('Rewritten.')
print(' ... migrating', name, 'to', new_name)
with open(new_name, 'w+') as f:
code = template['code']
ln_tags_format = filter_languages(code)
localized_format = translation_pattern.sub(translate,
ln_tags_format)
evaled = eval_format_template_elements(localized_format, None)
f.write(evaled)
print()
skip_legacy = lambda (name, key): name[-11:] != '_legacy.' + \
CFG_BIBFORMAT_FORMAT_OUTPUT_EXTENSION
output_formats = filter(
skip_legacy, iteritems(get_output_formats(with_attributes=True)))
print('>>> Going to migrate %d output format(s) ...' % (
len(output_formats)))
for name, output_format in output_formats:
if not any(map(lambda rule: rule['template'][-3:] ==
CFG_BIBFORMAT_FORMAT_TEMPLATE_EXTENSION,
output_format['rules'])):
print(' [!]', name, 'does not contain any', end=' ')
print(CFG_BIBFORMAT_FORMAT_TEMPLATE_EXTENSION, 'template', end=' ')
if only_template is not None:
print('or does not match', only_template_re, end=' ')
print('.')
continue
new_name = name[:-4] + \
'_legacy.' + CFG_BIBFORMAT_FORMAT_OUTPUT_EXTENSION
if os.path.exists(os.path.join(CFG_BIBFORMAT_OUTPUTS_PATH, new_name)):
print(' [!] File', new_name, 'already exists. Skipped.')
continue
shutil.copy2(
os.path.join(CFG_BIBFORMAT_OUTPUTS_PATH, name),
os.path.join(CFG_BIBFORMAT_OUTPUTS_PATH, new_name))
# rename template names
print(' ... migrating', name, 'to', new_name)
update_output_format_rules(name,
map(update_rule, output_format['rules']),
rename_template(output_format['default']))
print()
print('>>> Please re-run `bibreformat` for all cached output formats.')
print(' $ bibreformat -oHB,HD -a')
@manager.option('-o', '--output-format', dest='output_format',
default="HB", help="Specify output format/s (default HB)")
def expunge(output_format="HB"):
"""Remove static output formats from cache."""
from invenio.ext.sqlalchemy import db
from invenio.modules.formatter.models import Bibfmt
# Make it uppercased as it is stored in database.
output_format = output_format.upper()
print(">>> Cleaning %s cache..." % (output_format, ))
# Prepare where expression.
filter_format = (
Bibfmt.format == output_format if ',' not in output_format else
Bibfmt.format.in_(map(lambda x: x.strip(), output_format.split(',')))
)
Bibfmt.query.filter(filter_format).delete(synchronize_session=False)
db.session.commit()
def main():
"""Run manager."""
from invenio.base.factory import create_app
app = create_app()
manager.app = app
manager.run()
if __name__ == '__main__':
main()
| gpl-2.0 |
Shade5/coala-bears | tests/general/LineCountBearTest.py | 17 | 1831 | from queue import Queue
from bears.general.LineCountBear import LineCountBear
from coalib.testing.LocalBearTestHelper import LocalBearTestHelper
from coalib.results.Result import RESULT_SEVERITY, Result
from coalib.settings.Section import Section
from coalib.settings.Setting import Setting
class LineCountBearTest(LocalBearTestHelper):
def setUp(self):
self.section = Section('name')
self.uut = LineCountBear(self.section, Queue())
def test_run(self):
self.section.append(Setting('max_lines_per_file', 1))
self.check_results(
self.uut, ['line 1', 'line 2', 'line 3'],
[Result.from_values('LineCountBear',
'This file had 3 lines, which is 2 lines more '
'than the maximum limit specified.',
severity=RESULT_SEVERITY.NORMAL,
file='default')],
filename='default')
self.check_validity(self.uut, ['1 line'])
self.check_validity(self.uut, []) # Empty file
def test_exclude_blank_lines(self):
self.section.append(Setting('exclude_blank_lines', True))
self.section.append(Setting('max_lines_per_file', 2))
self.check_results(
self.uut, ['line 1', ' ', 'line 2',
'line 3', '\n', '\t', ' line 4',
'line 5 ', ' line 6 ', '\t\tline 7',
'', '\t \n ', ' \t\n '],
[Result.from_values('LineCountBear',
'This file had 7 lines, which is 5 lines more '
'than the maximum limit specified.',
severity=RESULT_SEVERITY.NORMAL,
file='default')],
filename='default')
| agpl-3.0 |
LIKAIMO/MissionPlanner | Lib/HTMLParser.py | 50 | 14059 | """A parser for HTML and XHTML."""
# This file is based on sgmllib.py, but the API is slightly different.
# XXX There should be a way to distinguish between PCDATA (parsed
# character data -- the normal case), RCDATA (replaceable character
# data -- only char and entity references and end tags are special)
# and CDATA (character data -- only end tags are special).
import markupbase
import re
# Regular expressions used for parsing
interesting_normal = re.compile('[&<]')
interesting_cdata = re.compile(r'<(/|\Z)')
incomplete = re.compile('&[a-zA-Z#]')
entityref = re.compile('&([a-zA-Z][-.a-zA-Z0-9]*)[^a-zA-Z0-9]')
charref = re.compile('&#(?:[0-9]+|[xX][0-9a-fA-F]+)[^0-9a-fA-F]')
starttagopen = re.compile('<[a-zA-Z]')
piclose = re.compile('>')
commentclose = re.compile(r'--\s*>')
tagfind = re.compile('[a-zA-Z][-.a-zA-Z0-9:_]*')
attrfind = re.compile(
r'\s*([a-zA-Z_][-.:a-zA-Z_0-9]*)(\s*=\s*'
r'(\'[^\']*\'|"[^"]*"|[^\s"\'=<>`]*))?')
locatestarttagend = re.compile(r"""
<[a-zA-Z][-.a-zA-Z0-9:_]* # tag name
(?:\s+ # whitespace before attribute name
(?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name
(?:\s*=\s* # value indicator
(?:'[^']*' # LITA-enclosed value
|\"[^\"]*\" # LIT-enclosed value
|[^'\">\s]+ # bare value
)
)?
)
)*
\s* # trailing whitespace
""", re.VERBOSE)
endendtag = re.compile('>')
endtagfind = re.compile('</\s*([a-zA-Z][-.a-zA-Z0-9:_]*)\s*>')
class HTMLParseError(Exception):
"""Exception raised for all parse errors."""
def __init__(self, msg, position=(None, None)):
assert msg
self.msg = msg
self.lineno = position[0]
self.offset = position[1]
def __str__(self):
result = self.msg
if self.lineno is not None:
result = result + ", at line %d" % self.lineno
if self.offset is not None:
result = result + ", column %d" % (self.offset + 1)
return result
class HTMLParser(markupbase.ParserBase):
"""Find tags and other markup and call handler functions.
Usage:
p = HTMLParser()
p.feed(data)
...
p.close()
Start tags are handled by calling self.handle_starttag() or
self.handle_startendtag(); end tags by self.handle_endtag(). The
data between tags is passed from the parser to the derived class
by calling self.handle_data() with the data as argument (the data
may be split up in arbitrary chunks). Entity references are
passed by calling self.handle_entityref() with the entity
reference as the argument. Numeric character references are
passed to self.handle_charref() with the string containing the
reference as the argument.
"""
CDATA_CONTENT_ELEMENTS = ("script", "style")
def __init__(self):
"""Initialize and reset this instance."""
self.reset()
def reset(self):
"""Reset this instance. Loses all unprocessed data."""
self.rawdata = ''
self.lasttag = '???'
self.interesting = interesting_normal
markupbase.ParserBase.reset(self)
def feed(self, data):
r"""Feed data to the parser.
Call this as often as you want, with as little or as much text
as you want (may include '\n').
"""
self.rawdata = self.rawdata + data
self.goahead(0)
def close(self):
"""Handle any buffered data."""
self.goahead(1)
def error(self, message):
raise HTMLParseError(message, self.getpos())
__starttag_text = None
def get_starttag_text(self):
"""Return full source of start tag: '<...>'."""
return self.__starttag_text
def set_cdata_mode(self):
self.interesting = interesting_cdata
def clear_cdata_mode(self):
self.interesting = interesting_normal
# Internal -- handle data as far as reasonable. May leave state
# and data to be processed by a subsequent call. If 'end' is
# true, force handling all data as if followed by EOF marker.
def goahead(self, end):
rawdata = self.rawdata
i = 0
n = len(rawdata)
while i < n:
match = self.interesting.search(rawdata, i) # < or &
if match:
j = match.start()
else:
j = n
if i < j: self.handle_data(rawdata[i:j])
i = self.updatepos(i, j)
if i == n: break
startswith = rawdata.startswith
if startswith('<', i):
if starttagopen.match(rawdata, i): # < + letter
k = self.parse_starttag(i)
elif startswith("</", i):
k = self.parse_endtag(i)
elif startswith("<!--", i):
k = self.parse_comment(i)
elif startswith("<?", i):
k = self.parse_pi(i)
elif startswith("<!", i):
k = self.parse_declaration(i)
elif (i + 1) < n:
self.handle_data("<")
k = i + 1
else:
break
if k < 0:
if end:
self.error("EOF in middle of construct")
break
i = self.updatepos(i, k)
elif startswith("&#", i):
match = charref.match(rawdata, i)
if match:
name = match.group()[2:-1]
self.handle_charref(name)
k = match.end()
if not startswith(';', k-1):
k = k - 1
i = self.updatepos(i, k)
continue
else:
if ";" in rawdata[i:]: #bail by consuming &#
self.handle_data(rawdata[0:2])
i = self.updatepos(i, 2)
break
elif startswith('&', i):
match = entityref.match(rawdata, i)
if match:
name = match.group(1)
self.handle_entityref(name)
k = match.end()
if not startswith(';', k-1):
k = k - 1
i = self.updatepos(i, k)
continue
match = incomplete.match(rawdata, i)
if match:
# match.group() will contain at least 2 chars
if end and match.group() == rawdata[i:]:
self.error("EOF in middle of entity or char ref")
# incomplete
break
elif (i + 1) < n:
# not the end of the buffer, and can't be confused
# with some other construct
self.handle_data("&")
i = self.updatepos(i, i + 1)
else:
break
else:
assert 0, "interesting.search() lied"
# end while
if end and i < n:
self.handle_data(rawdata[i:n])
i = self.updatepos(i, n)
self.rawdata = rawdata[i:]
# Internal -- parse processing instr, return end or -1 if not terminated
def parse_pi(self, i):
rawdata = self.rawdata
assert rawdata[i:i+2] == '<?', 'unexpected call to parse_pi()'
match = piclose.search(rawdata, i+2) # >
if not match:
return -1
j = match.start()
self.handle_pi(rawdata[i+2: j])
j = match.end()
return j
# Internal -- handle starttag, return end or -1 if not terminated
def parse_starttag(self, i):
self.__starttag_text = None
endpos = self.check_for_whole_start_tag(i)
if endpos < 0:
return endpos
rawdata = self.rawdata
self.__starttag_text = rawdata[i:endpos]
# Now parse the data between i+1 and j into a tag and attrs
attrs = []
match = tagfind.match(rawdata, i+1)
assert match, 'unexpected call to parse_starttag()'
k = match.end()
self.lasttag = tag = rawdata[i+1:k].lower()
while k < endpos:
m = attrfind.match(rawdata, k)
if not m:
break
attrname, rest, attrvalue = m.group(1, 2, 3)
if not rest:
attrvalue = None
elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
attrvalue[:1] == '"' == attrvalue[-1:]:
attrvalue = attrvalue[1:-1]
attrvalue = self.unescape(attrvalue)
attrs.append((attrname.lower(), attrvalue))
k = m.end()
end = rawdata[k:endpos].strip()
if end not in (">", "/>"):
lineno, offset = self.getpos()
if "\n" in self.__starttag_text:
lineno = lineno + self.__starttag_text.count("\n")
offset = len(self.__starttag_text) \
- self.__starttag_text.rfind("\n")
else:
offset = offset + len(self.__starttag_text)
self.error("junk characters in start tag: %r"
% (rawdata[k:endpos][:20],))
if end.endswith('/>'):
# XHTML-style empty tag: <span attr="value" />
self.handle_startendtag(tag, attrs)
else:
self.handle_starttag(tag, attrs)
if tag in self.CDATA_CONTENT_ELEMENTS:
self.set_cdata_mode()
return endpos
# Internal -- check to see if we have a complete starttag; return end
# or -1 if incomplete.
def check_for_whole_start_tag(self, i):
rawdata = self.rawdata
m = locatestarttagend.match(rawdata, i)
if m:
j = m.end()
next = rawdata[j:j+1]
if next == ">":
return j + 1
if next == "/":
if rawdata.startswith("/>", j):
return j + 2
if rawdata.startswith("/", j):
# buffer boundary
return -1
# else bogus input
self.updatepos(i, j + 1)
self.error("malformed empty start tag")
if next == "":
# end of input
return -1
if next in ("abcdefghijklmnopqrstuvwxyz=/"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"):
# end of input in or before attribute value, or we have the
# '/' from a '/>' ending
return -1
self.updatepos(i, j)
self.error("malformed start tag")
raise AssertionError("we should not get here!")
# Internal -- parse endtag, return end or -1 if incomplete
def parse_endtag(self, i):
rawdata = self.rawdata
assert rawdata[i:i+2] == "</", "unexpected call to parse_endtag"
match = endendtag.search(rawdata, i+1) # >
if not match:
return -1
j = match.end()
match = endtagfind.match(rawdata, i) # </ + tag + >
if not match:
self.error("bad end tag: %r" % (rawdata[i:j],))
tag = match.group(1)
self.handle_endtag(tag.lower())
self.clear_cdata_mode()
return j
# Overridable -- finish processing of start+end tag: <tag.../>
def handle_startendtag(self, tag, attrs):
self.handle_starttag(tag, attrs)
self.handle_endtag(tag)
# Overridable -- handle start tag
def handle_starttag(self, tag, attrs):
pass
# Overridable -- handle end tag
def handle_endtag(self, tag):
pass
# Overridable -- handle character reference
def handle_charref(self, name):
pass
# Overridable -- handle entity reference
def handle_entityref(self, name):
pass
# Overridable -- handle data
def handle_data(self, data):
pass
# Overridable -- handle comment
def handle_comment(self, data):
pass
# Overridable -- handle declaration
def handle_decl(self, decl):
pass
# Overridable -- handle processing instruction
def handle_pi(self, data):
pass
def unknown_decl(self, data):
self.error("unknown declaration: %r" % (data,))
# Internal -- helper to remove special character quoting
entitydefs = None
def unescape(self, s):
if '&' not in s:
return s
def replaceEntities(s):
s = s.groups()[0]
try:
if s[0] == "#":
s = s[1:]
if s[0] in ['x','X']:
c = int(s[1:], 16)
else:
c = int(s)
return unichr(c)
except ValueError:
return '&#'+s+';'
else:
# Cannot use name2codepoint directly, because HTMLParser supports apos,
# which is not part of HTML 4
import htmlentitydefs
if HTMLParser.entitydefs is None:
entitydefs = HTMLParser.entitydefs = {'apos':u"'"}
for k, v in htmlentitydefs.name2codepoint.iteritems():
entitydefs[k] = unichr(v)
try:
return self.entitydefs[s]
except KeyError:
return '&'+s+';'
return re.sub(r"&(#?[xX]?(?:[0-9a-fA-F]+|\w{1,8}));", replaceEntities, s)
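# Illustrative example (not part of the module): a minimal subclass that
# records the start tags it encounters.
#
#     class TagCollector(HTMLParser):
#         def __init__(self):
#             HTMLParser.__init__(self)
#             self.tags = []
#         def handle_starttag(self, tag, attrs):
#             self.tags.append(tag)
#
#     p = TagCollector()
#     p.feed('<html><body><p class="x">hi</p></body></html>')
#     p.close()
#     # p.tags == ['html', 'body', 'p']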
| gpl-3.0 |
googlearchive/pywebsocket | mod_pywebsocket/handshake/hybi00.py | 675 | 11294 | # Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This file provides the opening handshake processor for the WebSocket
protocol version HyBi 00.
Specification:
http://tools.ietf.org/html/draft-ietf-hybi-thewebsocketprotocol-00
"""
# Note: request.connection.write/read are used in this module, even though
# mod_python document says that they should be used only in connection
# handlers. Unfortunately, we have no other options. For example,
# request.write/read are not suitable because they don't allow direct raw bytes
# writing/reading.
import logging
import re
import struct
from mod_pywebsocket import common
from mod_pywebsocket.stream import StreamHixie75
from mod_pywebsocket import util
from mod_pywebsocket.handshake._base import HandshakeException
from mod_pywebsocket.handshake._base import check_request_line
from mod_pywebsocket.handshake._base import format_header
from mod_pywebsocket.handshake._base import get_default_port
from mod_pywebsocket.handshake._base import get_mandatory_header
from mod_pywebsocket.handshake._base import parse_host_header
from mod_pywebsocket.handshake._base import validate_mandatory_header
_MANDATORY_HEADERS = [
# key, expected value or None
[common.UPGRADE_HEADER, common.WEBSOCKET_UPGRADE_TYPE_HIXIE75],
[common.CONNECTION_HEADER, common.UPGRADE_CONNECTION_TYPE],
]
def _validate_subprotocol(subprotocol):
"""Checks if characters in subprotocol are in range between U+0020 and
    U+007E. A value in the Sec-WebSocket-Protocol field needs to satisfy this
requirement.
See the Section 4.1. Opening handshake of the spec.
"""
if not subprotocol:
raise HandshakeException('Invalid subprotocol name: empty')
# Parameter should be in the range U+0020 to U+007E.
for c in subprotocol:
if not 0x20 <= ord(c) <= 0x7e:
raise HandshakeException(
'Illegal character in subprotocol name: %r' % c)
def _check_header_lines(request, mandatory_headers):
check_request_line(request)
# The expected field names, and the meaning of their corresponding
# values, are as follows.
# |Upgrade| and |Connection|
for key, expected_value in mandatory_headers:
validate_mandatory_header(request, key, expected_value)
def _build_location(request):
"""Build WebSocket location for request."""
location_parts = []
if request.is_https():
location_parts.append(common.WEB_SOCKET_SECURE_SCHEME)
else:
location_parts.append(common.WEB_SOCKET_SCHEME)
location_parts.append('://')
host, port = parse_host_header(request)
connection_port = request.connection.local_addr[1]
if port != connection_port:
raise HandshakeException('Header/connection port mismatch: %d/%d' %
(port, connection_port))
location_parts.append(host)
if (port != get_default_port(request.is_https())):
location_parts.append(':')
location_parts.append(str(port))
location_parts.append(request.unparsed_uri)
return ''.join(location_parts)
class Handshaker(object):
"""Opening handshake processor for the WebSocket protocol version HyBi 00.
"""
def __init__(self, request, dispatcher):
"""Construct an instance.
Args:
request: mod_python request.
dispatcher: Dispatcher (dispatch.Dispatcher).
Handshaker will add attributes such as ws_resource in performing
handshake.
"""
self._logger = util.get_class_logger(self)
self._request = request
self._dispatcher = dispatcher
def do_handshake(self):
"""Perform WebSocket Handshake.
On _request, we set
ws_resource, ws_protocol, ws_location, ws_origin, ws_challenge,
ws_challenge_md5: WebSocket handshake information.
ws_stream: Frame generation/parsing class.
ws_version: Protocol version.
Raises:
HandshakeException: when any error happened in parsing the opening
handshake request.
"""
# 5.1 Reading the client's opening handshake.
# dispatcher sets it in self._request.
_check_header_lines(self._request, _MANDATORY_HEADERS)
self._set_resource()
self._set_subprotocol()
self._set_location()
self._set_origin()
self._set_challenge_response()
self._set_protocol_version()
self._dispatcher.do_extra_handshake(self._request)
self._send_handshake()
def _set_resource(self):
self._request.ws_resource = self._request.uri
def _set_subprotocol(self):
# |Sec-WebSocket-Protocol|
subprotocol = self._request.headers_in.get(
common.SEC_WEBSOCKET_PROTOCOL_HEADER)
if subprotocol is not None:
_validate_subprotocol(subprotocol)
self._request.ws_protocol = subprotocol
def _set_location(self):
# |Host|
host = self._request.headers_in.get(common.HOST_HEADER)
if host is not None:
self._request.ws_location = _build_location(self._request)
# TODO(ukai): check host is this host.
def _set_origin(self):
# |Origin|
origin = self._request.headers_in.get(common.ORIGIN_HEADER)
if origin is not None:
self._request.ws_origin = origin
def _set_protocol_version(self):
# |Sec-WebSocket-Draft|
draft = self._request.headers_in.get(common.SEC_WEBSOCKET_DRAFT_HEADER)
if draft is not None and draft != '0':
raise HandshakeException('Illegal value for %s: %s' %
(common.SEC_WEBSOCKET_DRAFT_HEADER,
draft))
self._logger.debug('Protocol version is HyBi 00')
self._request.ws_version = common.VERSION_HYBI00
self._request.ws_stream = StreamHixie75(self._request, True)
def _set_challenge_response(self):
# 5.2 4-8.
self._request.ws_challenge = self._get_challenge()
        # 5.2 9. let /response/ be the MD5 fingerprint of /challenge/
self._request.ws_challenge_md5 = util.md5_hash(
self._request.ws_challenge).digest()
self._logger.debug(
'Challenge: %r (%s)',
self._request.ws_challenge,
util.hexify(self._request.ws_challenge))
self._logger.debug(
'Challenge response: %r (%s)',
self._request.ws_challenge_md5,
util.hexify(self._request.ws_challenge_md5))
def _get_key_value(self, key_field):
key_value = get_mandatory_header(self._request, key_field)
self._logger.debug('%s: %r', key_field, key_value)
# 5.2 4. let /key-number_n/ be the digits (characters in the range
# U+0030 DIGIT ZERO (0) to U+0039 DIGIT NINE (9)) in /key_n/,
# interpreted as a base ten integer, ignoring all other characters
# in /key_n/.
try:
key_number = int(re.sub("\\D", "", key_value))
except:
raise HandshakeException('%s field contains no digit' % key_field)
# 5.2 5. let /spaces_n/ be the number of U+0020 SPACE characters
# in /key_n/.
spaces = re.subn(" ", "", key_value)[1]
if spaces == 0:
raise HandshakeException('%s field contains no space' % key_field)
self._logger.debug(
'%s: Key-number is %d and number of spaces is %d',
key_field, key_number, spaces)
# 5.2 6. if /key-number_n/ is not an integral multiple of /spaces_n/
# then abort the WebSocket connection.
if key_number % spaces != 0:
raise HandshakeException(
'%s: Key-number (%d) is not an integral multiple of spaces '
'(%d)' % (key_field, key_number, spaces))
# 5.2 7. let /part_n/ be /key-number_n/ divided by /spaces_n/.
part = key_number / spaces
self._logger.debug('%s: Part is %d', key_field, part)
return part
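    # Illustrative worked example (made-up key, not from the spec): for the
    # header value "18 0 64" the digits give key_number = 18064 and there are
    # spaces = 2 space characters, so part = 18064 / 2 = 9032; each part is
    # then packed into the challenge as a big-endian 32 bit integer.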
def _get_challenge(self):
# 5.2 4-7.
key1 = self._get_key_value(common.SEC_WEBSOCKET_KEY1_HEADER)
key2 = self._get_key_value(common.SEC_WEBSOCKET_KEY2_HEADER)
        # 5.2 8. let /challenge/ be the concatenation of /part_1/ and /part_2/
        # (each packed as a big-endian 32 bit integer) and /key_3/ (8 raw bytes
        # read from the connection).
challenge = ''
challenge += struct.pack('!I', key1) # network byteorder int
challenge += struct.pack('!I', key2) # network byteorder int
challenge += self._request.connection.read(8)
return challenge
def _send_handshake(self):
response = []
# 5.2 10. send the following line.
response.append('HTTP/1.1 101 WebSocket Protocol Handshake\r\n')
# 5.2 11. send the following fields to the client.
response.append(format_header(
common.UPGRADE_HEADER, common.WEBSOCKET_UPGRADE_TYPE_HIXIE75))
response.append(format_header(
common.CONNECTION_HEADER, common.UPGRADE_CONNECTION_TYPE))
response.append(format_header(
common.SEC_WEBSOCKET_LOCATION_HEADER, self._request.ws_location))
response.append(format_header(
common.SEC_WEBSOCKET_ORIGIN_HEADER, self._request.ws_origin))
if self._request.ws_protocol:
response.append(format_header(
common.SEC_WEBSOCKET_PROTOCOL_HEADER,
self._request.ws_protocol))
# 5.2 12. send two bytes 0x0D 0x0A.
response.append('\r\n')
# 5.2 13. send /response/
response.append(self._request.ws_challenge_md5)
raw_response = ''.join(response)
self._request.connection.write(raw_response)
self._logger.debug('Sent server\'s opening handshake: %r',
raw_response)
# vi:sts=4 sw=4 et
| bsd-3-clause |
rolandwz/pymisc | monitor/once.py | 1 | 1567 | # -*- coding: utf-8 -*-
import threading, sys
from utils.rwlogging import log
from notifier import fetion
from notifier import mail
from category import prices
from category import weather
from utils import const
def weatherMonitor():
try:
#msg = ''
#msg = msg + weather.fetchWeather()
multimsg = [0, 0]
multimsg[0] = [0, 0]
multimsg[0][0] = weather.fetchWeather('101010200')
multimsg[0][1] = const.WEATHER_BJ_MOBILES
multimsg[1] = [0, 0]
multimsg[1][0] = weather.fetchWeather('101180101')
multimsg[1][1] = const.WEATHER_ZZ_MOBILES
sendMultiMessage('Weather', multimsg)
except:
		log.exception('weatherMonitor Exception Occurred!')
def pm25Monitor():
try:
msg = ''
msg = msg + weather.fetchPm25Forcast()
if msg:
log.info('* pm25Monitor MESSAGE * ' + msg)
sendMessage('PM2.5', msg, 2)
except:
		log.exception('pm25Monitor Exception Occurred!')
def sendMultiMessage(mtype, multimsg):
try:
#for msgs in multimsg:
# mail.send(mtype, msgs[0])
pass
except:
		log.exception('Email Exception Occurred!')
try:
fetion.sendMultiSms(multimsg)
pass
except:
		log.exception('Fetion Exception Occurred!')
def sendMessage(mtype, msg, rtype):
multimsg = [0]
multimsg[0] = [0, 0]
multimsg[0][0] = msg
if rtype == 1:
multimsg[0][1] = const.SELF_MOBILE
else:
multimsg[0][1] = const.WEATHER_BJ_MOBILES
sendMultiMessage(mtype, multimsg)
if __name__ == "__main__":
if len(sys.argv) < 2:
print 'no arg'
exit(0)
arg = sys.argv[1]
if arg == 'weather':
weatherMonitor()
if arg == 'pm25':
pm25Monitor()
| mit |
kronicz/ecommerce-2 | lib/python2.7/site-packages/django/views/csrf.py | 437 | 5057 | from django.conf import settings
from django.http import HttpResponseForbidden
from django.template import Context, Engine
from django.utils.translation import ugettext as _
from django.utils.version import get_docs_version
# We include the template inline since we need to be able to reliably display
# this error message, especially for the sake of developers, and there isn't any
# other way of making it available independent of what is in the settings file.
# Only the text appearing with DEBUG=False is translated. Normal translation
# tags cannot be used with this inline templates as makemessages would not be
# able to discover the strings.
CSRF_FAILURE_TEMPLATE = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="robots" content="NONE,NOARCHIVE">
<title>403 Forbidden</title>
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; background:#eee; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; margin-bottom:.4em; }
h1 span { font-size:60%; color:#666; font-weight:normal; }
#info { background:#f6f6f6; }
#info ul { margin: 0.5em 4em; }
#info p, #summary p { padding-top:10px; }
#summary { background: #ffc; }
#explanation { background:#eee; border-bottom: 0px none; }
</style>
</head>
<body>
<div id="summary">
<h1>{{ title }} <span>(403)</span></h1>
<p>{{ main }}</p>
{% if no_referer %}
<p>{{ no_referer1 }}</p>
<p>{{ no_referer2 }}</p>
{% endif %}
{% if no_cookie %}
<p>{{ no_cookie1 }}</p>
<p>{{ no_cookie2 }}</p>
{% endif %}
</div>
{% if DEBUG %}
<div id="info">
<h2>Help</h2>
{% if reason %}
<p>Reason given for failure:</p>
<pre>
{{ reason }}
</pre>
{% endif %}
<p>In general, this can occur when there is a genuine Cross Site Request Forgery, or when
<a
href="https://docs.djangoproject.com/en/{{ docs_version }}/ref/csrf/">Django's
CSRF mechanism</a> has not been used correctly. For POST forms, you need to
ensure:</p>
<ul>
<li>Your browser is accepting cookies.</li>
<li>The view function passes a <code>request</code> to the template's <a
href="https://docs.djangoproject.com/en/dev/topics/templates/#django.template.backends.base.Template.render"><code>render</code></a>
method.</li>
<li>In the template, there is a <code>{% templatetag openblock %} csrf_token
{% templatetag closeblock %}</code> template tag inside each POST form that
targets an internal URL.</li>
<li>If you are not using <code>CsrfViewMiddleware</code>, then you must use
<code>csrf_protect</code> on any views that use the <code>csrf_token</code>
template tag, as well as those that accept the POST data.</li>
</ul>
<p>You're seeing the help section of this page because you have <code>DEBUG =
True</code> in your Django settings file. Change that to <code>False</code>,
and only the initial error message will be displayed. </p>
<p>You can customize this page using the CSRF_FAILURE_VIEW setting.</p>
</div>
{% else %}
<div id="explanation">
<p><small>{{ more }}</small></p>
</div>
{% endif %}
</body>
</html>
"""
def csrf_failure(request, reason=""):
"""
Default view used when request fails CSRF protection
"""
from django.middleware.csrf import REASON_NO_REFERER, REASON_NO_CSRF_COOKIE
t = Engine().from_string(CSRF_FAILURE_TEMPLATE)
c = Context({
'title': _("Forbidden"),
'main': _("CSRF verification failed. Request aborted."),
'reason': reason,
'no_referer': reason == REASON_NO_REFERER,
'no_referer1': _(
"You are seeing this message because this HTTPS site requires a "
"'Referer header' to be sent by your Web browser, but none was "
"sent. This header is required for security reasons, to ensure "
"that your browser is not being hijacked by third parties."),
'no_referer2': _(
"If you have configured your browser to disable 'Referer' headers, "
"please re-enable them, at least for this site, or for HTTPS "
"connections, or for 'same-origin' requests."),
'no_cookie': reason == REASON_NO_CSRF_COOKIE,
'no_cookie1': _(
"You are seeing this message because this site requires a CSRF "
"cookie when submitting forms. This cookie is required for "
"security reasons, to ensure that your browser is not being "
"hijacked by third parties."),
'no_cookie2': _(
"If you have configured your browser to disable cookies, please "
"re-enable them, at least for this site, or for 'same-origin' "
"requests."),
'DEBUG': settings.DEBUG,
'docs_version': get_docs_version(),
'more': _("More information is available with DEBUG=True."),
})
return HttpResponseForbidden(t.render(c), content_type='text/html')
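# Illustrative usage sketch (assumption, not part of this module): Django can
# be pointed at a custom failure view with the CSRF_FAILURE_VIEW setting, e.g.
#
#     # settings.py
#     CSRF_FAILURE_VIEW = 'myproject.views.csrf_failure'
#
# where the referenced view accepts the same signature:
#
#     def csrf_failure(request, reason=""):
#         ...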
| mit |
zhaodelong/django | tests/flatpages_tests/test_templatetags.py | 309 | 7111 | from django.contrib.auth.models import AnonymousUser, User
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.models import Site
from django.template import Context, Template, TemplateSyntaxError
from django.test import TestCase, modify_settings, override_settings
from .settings import FLATPAGES_TEMPLATES
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.flatpages'})
@override_settings(
MIDDLEWARE_CLASSES=[
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
],
ROOT_URLCONF='flatpages_tests.urls',
TEMPLATES=FLATPAGES_TEMPLATES,
SITE_ID=1,
)
class FlatpageTemplateTagTests(TestCase):
@classmethod
def setUpTestData(cls):
# don't use the manager because we want to ensure the site exists
# with pk=1, regardless of whether or not it already exists.
cls.site1 = Site(pk=1, domain='example.com', name='example.com')
cls.site1.save()
cls.fp1 = FlatPage.objects.create(
url='/flatpage/', title='A Flatpage', content="Isn't it flat!",
enable_comments=False, template_name='', registration_required=False
)
cls.fp2 = FlatPage.objects.create(
url='/location/flatpage/', title='A Nested Flatpage', content="Isn't it flat and deep!",
enable_comments=False, template_name='', registration_required=False
)
cls.fp3 = FlatPage.objects.create(
url='/sekrit/', title='Sekrit Flatpage', content="Isn't it sekrit!",
enable_comments=False, template_name='', registration_required=True
)
cls.fp4 = FlatPage.objects.create(
url='/location/sekrit/', title='Sekrit Nested Flatpage', content="Isn't it sekrit and deep!",
enable_comments=False, template_name='', registration_required=True
)
cls.fp1.sites.add(cls.site1)
cls.fp2.sites.add(cls.site1)
cls.fp3.sites.add(cls.site1)
cls.fp4.sites.add(cls.site1)
def test_get_flatpages_tag(self):
"The flatpage template tag retrieves unregistered prefixed flatpages by default"
out = Template(
"{% load flatpages %}"
"{% get_flatpages as flatpages %}"
"{% for page in flatpages %}"
"{{ page.title }},"
"{% endfor %}"
).render(Context())
self.assertEqual(out, "A Flatpage,A Nested Flatpage,")
def test_get_flatpages_tag_for_anon_user(self):
"The flatpage template tag retrieves unregistered flatpages for an anonymous user"
out = Template(
"{% load flatpages %}"
"{% get_flatpages for anonuser as flatpages %}"
"{% for page in flatpages %}"
"{{ page.title }},"
"{% endfor %}"
).render(Context({
'anonuser': AnonymousUser()
}))
self.assertEqual(out, "A Flatpage,A Nested Flatpage,")
def test_get_flatpages_tag_for_user(self):
"The flatpage template tag retrieves all flatpages for an authenticated user"
me = User.objects.create_user('testuser', '[email protected]', 's3krit')
out = Template(
"{% load flatpages %}"
"{% get_flatpages for me as flatpages %}"
"{% for page in flatpages %}"
"{{ page.title }},"
"{% endfor %}"
).render(Context({
'me': me
}))
self.assertEqual(out, "A Flatpage,A Nested Flatpage,Sekrit Nested Flatpage,Sekrit Flatpage,")
def test_get_flatpages_with_prefix(self):
"The flatpage template tag retrieves unregistered prefixed flatpages by default"
out = Template(
"{% load flatpages %}"
"{% get_flatpages '/location/' as location_flatpages %}"
"{% for page in location_flatpages %}"
"{{ page.title }},"
"{% endfor %}"
).render(Context())
self.assertEqual(out, "A Nested Flatpage,")
def test_get_flatpages_with_prefix_for_anon_user(self):
"The flatpage template tag retrieves unregistered prefixed flatpages for an anonymous user"
out = Template(
"{% load flatpages %}"
"{% get_flatpages '/location/' for anonuser as location_flatpages %}"
"{% for page in location_flatpages %}"
"{{ page.title }},"
"{% endfor %}"
).render(Context({
'anonuser': AnonymousUser()
}))
self.assertEqual(out, "A Nested Flatpage,")
def test_get_flatpages_with_prefix_for_user(self):
"The flatpage template tag retrieve prefixed flatpages for an authenticated user"
me = User.objects.create_user('testuser', '[email protected]', 's3krit')
out = Template(
"{% load flatpages %}"
"{% get_flatpages '/location/' for me as location_flatpages %}"
"{% for page in location_flatpages %}"
"{{ page.title }},"
"{% endfor %}"
).render(Context({
'me': me
}))
self.assertEqual(out, "A Nested Flatpage,Sekrit Nested Flatpage,")
def test_get_flatpages_with_variable_prefix(self):
"The prefix for the flatpage template tag can be a template variable"
out = Template(
"{% load flatpages %}"
"{% get_flatpages location_prefix as location_flatpages %}"
"{% for page in location_flatpages %}"
"{{ page.title }},"
"{% endfor %}"
).render(Context({
'location_prefix': '/location/'
}))
self.assertEqual(out, "A Nested Flatpage,")
def test_parsing_errors(self):
"There are various ways that the flatpages template tag won't parse"
render = lambda t: Template(t).render(Context())
self.assertRaises(TemplateSyntaxError, render,
"{% load flatpages %}{% get_flatpages %}")
self.assertRaises(TemplateSyntaxError, render,
"{% load flatpages %}{% get_flatpages as %}")
self.assertRaises(TemplateSyntaxError, render,
"{% load flatpages %}{% get_flatpages cheesecake flatpages %}")
self.assertRaises(TemplateSyntaxError, render,
"{% load flatpages %}{% get_flatpages as flatpages asdf %}")
self.assertRaises(TemplateSyntaxError, render,
"{% load flatpages %}{% get_flatpages cheesecake user as flatpages %}")
self.assertRaises(TemplateSyntaxError, render,
"{% load flatpages %}{% get_flatpages for user as flatpages asdf %}")
self.assertRaises(TemplateSyntaxError, render,
"{% load flatpages %}{% get_flatpages prefix for user as flatpages asdf %}")
| bsd-3-clause |
shiblon/pytour | static/js/pypyjs/pypy-nojit.js-0.3.1/lib/modules/pyrepl/cmdrepl.py | 10 | 4094 | # Copyright 2000-2007 Michael Hudson-Doyle <[email protected]>
# Maciek Fijalkowski
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Wedge pyrepl behaviour into cmd.Cmd-derived classes.
replize, when given a subclass of cmd.Cmd, returns a class that
behaves almost identically to the supplied class, except that it uses
pyrepl instead of raw_input.
It was designed to let you do this:
>>> import pdb
>>> from pyrepl import replize
>>> pdb.Pdb = replize(pdb.Pdb)
which is in fact done by the `pythoni' script that comes with
pyrepl."""
from __future__ import nested_scopes
from pyrepl import completing_reader as cr, reader, completer
from pyrepl.completing_reader import CompletingReader as CR
import cmd
class CmdReader(CR):
def collect_keymap(self):
return super(CmdReader, self).collect_keymap() + (
("\\M-\\n", "invalid-key"),
("\\n", "accept"))
CR_init = CR.__init__
def __init__(self, completions):
self.CR_init(self)
self.completions = completions
def get_completions(self, stem):
if len(stem) != self.pos:
return []
return sorted(set(s for s in self.completions
if s.startswith(stem)))
def replize(klass, history_across_invocations=1):
"""Return a subclass of the cmd.Cmd-derived klass that uses
pyrepl instead of readline.
Raises a ValueError if klass does not derive from cmd.Cmd.
The optional history_across_invocations parameter (default 1)
controls whether instances of the returned class share
histories."""
completions = [s[3:]
for s in completer.get_class_members(klass)
if s.startswith("do_")]
if not issubclass(klass, cmd.Cmd):
        raise ValueError("%s is not a subclass of cmd.Cmd" % (klass,))
# if klass.cmdloop.im_class is not cmd.Cmd:
# print "this may not work"
class CmdRepl(klass):
k_init = klass.__init__
if history_across_invocations:
_CmdRepl__history = []
def __init__(self, *args, **kw):
self.k_init(*args, **kw)
self.__reader = CmdReader(completions)
self.__reader.history = CmdRepl._CmdRepl__history
self.__reader.historyi = len(CmdRepl._CmdRepl__history)
else:
def __init__(self, *args, **kw):
self.k_init(*args, **kw)
self.__reader = CmdReader(completions)
def cmdloop(self, intro=None):
self.preloop()
if intro is not None:
self.intro = intro
if self.intro:
print self.intro
stop = None
while not stop:
if self.cmdqueue:
line = self.cmdqueue[0]
del self.cmdqueue[0]
else:
try:
self.__reader.ps1 = self.prompt
line = self.__reader.readline()
except EOFError:
line = "EOF"
line = self.precmd(line)
stop = self.onecmd(line)
stop = self.postcmd(stop, line)
self.postloop()
CmdRepl.__name__ = "replize(%s.%s)"%(klass.__module__, klass.__name__)
return CmdRepl
| apache-2.0 |
fhaoquan/kbengine | kbe/res/scripts/common/Lib/test/test_importlib/import_/test___loader__.py | 84 | 1895 | from importlib import machinery
import sys
import types
import unittest
from .. import util
from . import util as import_util
class SpecLoaderMock:
def find_spec(self, fullname, path=None, target=None):
return machinery.ModuleSpec(fullname, self)
def exec_module(self, module):
pass
class SpecLoaderAttributeTests:
def test___loader__(self):
loader = SpecLoaderMock()
with util.uncache('blah'), util.import_state(meta_path=[loader]):
module = self.__import__('blah')
self.assertEqual(loader, module.__loader__)
Frozen_SpecTests, Source_SpecTests = util.test_both(
SpecLoaderAttributeTests, __import__=import_util.__import__)
class LoaderMock:
def find_module(self, fullname, path=None):
return self
def load_module(self, fullname):
sys.modules[fullname] = self.module
return self.module
class LoaderAttributeTests:
def test___loader___missing(self):
module = types.ModuleType('blah')
try:
del module.__loader__
except AttributeError:
pass
loader = LoaderMock()
loader.module = module
with util.uncache('blah'), util.import_state(meta_path=[loader]):
module = self.__import__('blah')
self.assertEqual(loader, module.__loader__)
def test___loader___is_None(self):
module = types.ModuleType('blah')
module.__loader__ = None
loader = LoaderMock()
loader.module = module
with util.uncache('blah'), util.import_state(meta_path=[loader]):
returned_module = self.__import__('blah')
self.assertEqual(loader, module.__loader__)
Frozen_Tests, Source_Tests = util.test_both(LoaderAttributeTests,
__import__=import_util.__import__)
if __name__ == '__main__':
unittest.main()
| lgpl-3.0 |
mollstam/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/boto-2.38.0/boto/rds/statusinfo.py | 180 | 2011 | # Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
class StatusInfo(object):
"""
Describes a status message.
"""
def __init__(self, status_type=None, normal=None, status=None, message=None):
self.status_type = status_type
self.normal = normal
self.status = status
self.message = message
def __repr__(self):
return 'StatusInfo:%s' % self.message
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'StatusType':
self.status_type = value
elif name == 'Normal':
if value.lower() == 'true':
self.normal = True
else:
self.normal = False
elif name == 'Status':
self.status = value
elif name == 'Message':
self.message = value
else:
setattr(self, name, value)
| mit |
CWRoos/msp430-python-raspberry-pi | msp430/asm/infix2postfix.py | 2 | 5413 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2010 Chris Liechti <[email protected]>
# All Rights Reserved.
# Simplified BSD License (see LICENSE.txt for full text)
"""\
Parse algebraic expressions (infix) and output postfix notation.
"""
import re
class Scanner(object):
scannerRE = re.compile(r'''
(?P<SPACE> \s+ ) |
(?P<LEFT> \( ) |
(?P<RIGHT> \) ) |
(?P<HEXNUMBER> 0x[0-9a-f]+ ) |
(?P<BINNUMBER> 0b[01]+ ) |
(?P<NUMBER> \d+ ) |
(?P<UNARYOPERATOR> not|(\B[-+~]\b) ) |
(?P<OPERATOR> or|and|<<|>>|==|!=|<=|>=|[-+*/\|&\^<>] ) |
(?P<VARIABLE> \.?[$_a-z]\w* )
''', re.VERBOSE|re.IGNORECASE|re.UNICODE)
def __init__(self, source):
self.source = source
self.pos = 0
self.len = len(source)
def scan(self):
while True:
if self.pos >= self.len:
return None, None
m = self.scannerRE.match(self.source, self.pos)
if m is None:
raise ValueError('invalid token: %r...' % (self.source[self.pos:self.pos+10],))
self.pos = m.end()
token_type = m.lastgroup
if token_type != 'SPACE':
token = m.group(token_type)
if token_type == 'HEXNUMBER':
token = int(token, 16)
token_type = 'NUMBER'
elif token_type == 'BINNUMBER':
token = int(token[2:], 2)
token_type = 'NUMBER'
elif token_type == 'NUMBER':
token = int(token)
elif token_type == 'UNARYOPERATOR':
if token == '-': token = 'neg'
elif token == '+': token = '0 +'
return token_type, token
default_precedence_list = [
# lowest precedence
['or'],
['and'],
['not'],
['<', '<=', '>', '>=', '==', '!='],
['|', '^', '&'],
['<<', '>>'],
['+', '-'],
['*', '/', '%'],
['~', 'neg', '0 +'],
['(', ')'],
# highest precedence
]
def convert_precedence_list(precedence_list):
precedence = {}
for priority, equals in enumerate(precedence_list):
for operator in equals:
precedence[operator] = priority
return precedence
default_precedence = convert_precedence_list(default_precedence_list)
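# Illustrative note (not part of the original module): after conversion the
# mapping gives each operator its row index in default_precedence_list, e.g.
#   default_precedence['or'] == 0, default_precedence['+'] == 6,
#   default_precedence['*'] == 7, default_precedence['('] == 9
# and infix2postfix() below compares these numbers when deciding whether to
# pop operators off the stack.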
def print_precedence_list():
print "Operator precedence from lowest to highest:"
    for priority, equals in enumerate(default_precedence_list):
print '%d: %s' % (priority, ' '.join(equals))
#~ print_precedence_list()
def infix2postfix(expression, variable_prefix='', scanner=Scanner, precedence=default_precedence):
"""\
Convert an expression to postfix notation (RPN), respecting parentheses and
operator precedences.
>>> infix2postfix(u'1+2')
u'1 2 +'
>>> infix2postfix(u'1+ 2')
u'1 2 +'
# >>> infix2postfix(u'1 +2') # interpreted as unary plus, OK
    # u'1 2 +'
>>> infix2postfix(u'1 + 2')
u'1 2 +'
>>> infix2postfix(u'1+2*3')
u'1 2 3 * +'
>>> infix2postfix(u'(1+2)*3')
u'1 2 + 3 *'
# unary operators
>>> infix2postfix(u'( -1+2) * -3-4')
u'1 neg 2 + 3 neg * 4 -'
>>> infix2postfix(u'1/2 == 3')
u'1 2 / 3 =='
>>> infix2postfix(u'1 < 2 or 3 < 4')
u'1 2 < 3 4 < or'
>>> infix2postfix(u'1 <= 2 and 3 >= 4')
u'1 2 <= 3 4 >= and'
>>> infix2postfix(u'7 & 3 != 0')
u'7 3 & 0 !='
>>> infix2postfix('not 4 + 1')
u'4 1 + not'
>>> infix2postfix('~A + 1')
u'A ~ 1 +'
"""
operator_stack = []
output_string = []
s = scanner(expression)
while True:
token_type, token = s.scan()
#~ print token_type, token
if token_type is None: break
elif token_type == 'LEFT':
operator_stack.append(token)
elif token_type == 'VARIABLE':
output_string.append(u'%s%s' % (variable_prefix, token))
elif token_type == 'NUMBER':
output_string.append(token)
elif token_type == 'OPERATOR' or token_type == 'UNARYOPERATOR':
if (not operator_stack
or operator_stack[-1] == '('
or precedence[operator_stack[-1]] < precedence[token]):
operator_stack.append(token)
else:
while True:
output_string.append(operator_stack.pop())
if (not operator_stack
or operator_stack[-1] == '('
or precedence[token] >= precedence[operator_stack[-1]]):
break
operator_stack.append(token)
elif token_type == 'RIGHT':
while operator_stack[-1] != '(':
output_string.append(operator_stack.pop())
operator_stack.pop() # the '(' itself
else:
raise ValueError(u'unknown token: %r' % (token_type,))
if '(' in operator_stack:
raise ValueError('Unbalanced (, )')
while operator_stack:
output_string.append(operator_stack.pop())
return u' '.join(unicode(s) for s in output_string)
if __name__ == '__main__':
import doctest
doctest.testmod()
| bsd-3-clause |
ssalevan/cobbler | cobbler/func_utils.py | 17 | 1129 | """
Misc func functions for cobbler
Copyright 2006-2008, Red Hat, Inc
Scott Henson <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
try:
import func.overlord.client as func
from func.CommonErrors import Func_Client_Exception
HAZFUNC=True
except ImportError:
HAZFUNC=False
except IOError:
# cant import Func because we're not root, for instance, we're likely
# running from Apache and we've pulled this in from importing utils
HAZFUNC=False
| gpl-2.0 |
py4n6/aff4 | scons-local-1.2.0/SCons/Tool/dvi.py | 12 | 2265 | """SCons.Tool.dvi
Common DVI Builder definition for various other Tool modules that use it.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/dvi.py 3842 2008/12/20 22:59:52 scons"
import SCons.Builder
import SCons.Tool
DVIBuilder = None
def generate(env):
try:
env['BUILDERS']['DVI']
except KeyError:
global DVIBuilder
if DVIBuilder is None:
# The suffix is hard-coded to '.dvi', not configurable via a
# construction variable like $DVISUFFIX, because the output
# file name is hard-coded within TeX.
DVIBuilder = SCons.Builder.Builder(action = {},
source_scanner = SCons.Tool.LaTeXScanner,
suffix = '.dvi',
emitter = {},
source_ext_match = None)
env['BUILDERS']['DVI'] = DVIBuilder
def exists(env):
# This only puts a skeleton Builder in place, so if someone
# references this Tool directly, it's always "available."
return 1
| gpl-3.0 |
vaygr/ansible | lib/ansible/modules/network/nxos/nxos_system.py | 31 | 11749 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: nxos_system
extends_documentation_fragment: nxos
version_added: "2.3"
author: "Peter Sprygada (@privateip)"
short_description: Manage the system attributes on Cisco NXOS devices
description:
- This module provides declarative management of node system attributes
on Cisco NXOS devices. It provides an option to configure host system
parameters or remove those parameters from the device active
configuration.
options:
hostname:
description:
- Configure the device hostname parameter. This option takes an ASCII string value.
domain_name:
description:
- Configures the default domain
name suffix to be used when referencing this node by its
FQDN. This argument accepts either a list of domain names or
a list of dicts that configure the domain name and VRF name. See
examples.
domain_lookup:
description:
- Enables or disables the DNS
lookup feature in Cisco NXOS. This argument accepts boolean
values. When enabled, the system will try to resolve hostnames
using DNS and when disabled, hostnames will not be resolved.
domain_search:
description:
- Configures a list of domain
name suffixes to search when performing DNS name resolution.
This argument accepts either a list of domain names or
a list of dicts that configure the domain name and VRF name. See
examples.
name_servers:
description:
- List of DNS name servers by IP address to use to perform name resolution
lookups. This argument accepts either a list of DNS servers or
a list of hashes that configure the name server and VRF name. See
examples.
system_mtu:
description:
- Specifies the mtu, must be an integer.
state:
description:
- State of the configuration
values in the device's current active configuration. When set
to I(present), the values should be configured in the device active
configuration and when set to I(absent) the values should not be
in the device active configuration
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: configure hostname and domain-name
nxos_system:
hostname: nxos01
domain_name: test.example.com
- name: remove configuration
nxos_system:
state: absent
- name: configure name servers
nxos_system:
name_servers:
- 8.8.8.8
- 8.8.4.4
- name: configure name servers with VRF support
nxos_system:
name_servers:
- { server: 8.8.8.8, vrf: mgmt }
- { server: 8.8.4.4, vrf: mgmt }
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- hostname nxos01
- ip domain-name test.example.com
"""
import re
from ansible.module_utils.network.nxos.nxos import get_config, load_config
from ansible.module_utils.network.nxos.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
from ansible.module_utils.network.common.config import NetworkConfig
from ansible.module_utils.network.common.utils import ComplexList
_CONFIGURED_VRFS = None
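# Module-level cache of the VRF names parsed from the device's running
# configuration; has_vrf() fills it on first use so the config is only
# fetched once per module run even when several entries reference VRFs.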
def has_vrf(module, vrf):
global _CONFIGURED_VRFS
if _CONFIGURED_VRFS is not None:
return vrf in _CONFIGURED_VRFS
config = get_config(module)
_CONFIGURED_VRFS = re.findall(r'vrf context (\S+)', config)
return vrf in _CONFIGURED_VRFS
def map_obj_to_commands(want, have, module):
commands = list()
state = module.params['state']
def needs_update(x):
return want.get(x) and (want.get(x) != have.get(x))
def difference(x, y, z):
return [item for item in x[z] if item not in y[z]]
def remove(cmd, commands, vrf=None):
if vrf:
commands.append('vrf context %s' % vrf)
commands.append(cmd)
if vrf:
commands.append('exit')
def add(cmd, commands, vrf=None):
if vrf:
if not has_vrf(module, vrf):
module.fail_json(msg='invalid vrf name %s' % vrf)
return remove(cmd, commands, vrf)
if state == 'absent':
if have['hostname']:
commands.append('no hostname')
for item in have['domain_name']:
cmd = 'no ip domain-name %s' % item['name']
remove(cmd, commands, item['vrf'])
for item in have['domain_search']:
cmd = 'no ip domain-list %s' % item['name']
remove(cmd, commands, item['vrf'])
for item in have['name_servers']:
cmd = 'no ip name-server %s' % item['server']
remove(cmd, commands, item['vrf'])
if have['system_mtu']:
commands.append('no system jumbomtu')
if state == 'present':
if needs_update('hostname'):
commands.append('hostname %s' % want['hostname'])
if needs_update('domain_lookup'):
cmd = 'ip domain-lookup'
if want['domain_lookup'] is False:
cmd = 'no %s' % cmd
commands.append(cmd)
if want['domain_name']:
for item in difference(have, want, 'domain_name'):
cmd = 'no ip domain-name %s' % item['name']
remove(cmd, commands, item['vrf'])
for item in difference(want, have, 'domain_name'):
cmd = 'ip domain-name %s' % item['name']
add(cmd, commands, item['vrf'])
if want['domain_search']:
for item in difference(have, want, 'domain_search'):
cmd = 'no ip domain-list %s' % item['name']
remove(cmd, commands, item['vrf'])
for item in difference(want, have, 'domain_search'):
cmd = 'ip domain-list %s' % item['name']
add(cmd, commands, item['vrf'])
if want['name_servers']:
for item in difference(have, want, 'name_servers'):
cmd = 'no ip name-server %s' % item['server']
remove(cmd, commands, item['vrf'])
for item in difference(want, have, 'name_servers'):
cmd = 'ip name-server %s' % item['server']
add(cmd, commands, item['vrf'])
if needs_update('system_mtu'):
commands.append('system jumbomtu %s' % want['system_mtu'])
return commands
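# The parse_* helpers below extract the currently configured hostname, domain
# settings, name servers and system MTU from the running configuration
# (including per-VRF context blocks) so they can be diffed against the
# requested parameters in map_obj_to_commands().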
def parse_hostname(config):
match = re.search(r'^hostname (\S+)', config, re.M)
if match:
return match.group(1)
def parse_domain_name(config, vrf_config):
objects = list()
regex = re.compile(r'ip domain-name (\S+)')
match = regex.search(config, re.M)
if match:
objects.append({'name': match.group(1), 'vrf': None})
for vrf, cfg in iteritems(vrf_config):
match = regex.search(cfg, re.M)
if match:
objects.append({'name': match.group(1), 'vrf': vrf})
return objects
def parse_domain_search(config, vrf_config):
objects = list()
for item in re.findall(r'^ip domain-list (\S+)', config, re.M):
objects.append({'name': item, 'vrf': None})
for vrf, cfg in iteritems(vrf_config):
for item in re.findall(r'ip domain-list (\S+)', cfg, re.M):
objects.append({'name': item, 'vrf': vrf})
return objects
def parse_name_servers(config, vrf_config, vrfs):
objects = list()
match = re.search('^ip name-server (.+)$', config, re.M)
if match:
for addr in match.group(1).split(' '):
if addr == 'use-vrf' or addr in vrfs:
continue
objects.append({'server': addr, 'vrf': None})
for vrf, cfg in iteritems(vrf_config):
vrf_match = re.search('ip name-server (.+)', cfg, re.M)
if vrf_match:
for addr in vrf_match.group(1).split(' '):
objects.append({'server': addr, 'vrf': vrf})
return objects
def parse_system_mtu(config):
match = re.search(r'^system jumbomtu (\d+)', config, re.M)
if match:
return int(match.group(1))
def map_config_to_obj(module):
config = get_config(module)
configobj = NetworkConfig(indent=2, contents=config)
vrf_config = {}
vrfs = re.findall(r'^vrf context (\S+)$', config, re.M)
for vrf in vrfs:
config_data = configobj.get_block_config(path=['vrf context %s' % vrf])
vrf_config[vrf] = config_data
return {
'hostname': parse_hostname(config),
'domain_lookup': 'no ip domain-lookup' not in config,
'domain_name': parse_domain_name(config, vrf_config),
'domain_search': parse_domain_search(config, vrf_config),
'name_servers': parse_name_servers(config, vrf_config, vrfs),
'system_mtu': parse_system_mtu(config)
}
def validate_system_mtu(value, module):
if not 1500 <= value <= 9216:
module.fail_json(msg='system_mtu must be between 1500 and 9216')
def map_params_to_obj(module):
obj = {
'hostname': module.params['hostname'],
'domain_lookup': module.params['domain_lookup'],
'system_mtu': module.params['system_mtu']
}
domain_name = ComplexList(dict(
name=dict(key=True),
vrf=dict()
), module)
domain_search = ComplexList(dict(
name=dict(key=True),
vrf=dict()
), module)
name_servers = ComplexList(dict(
server=dict(key=True),
vrf=dict()
), module)
for arg, cast in [('domain_name', domain_name), ('domain_search', domain_search),
('name_servers', name_servers)]:
if module.params[arg] is not None:
obj[arg] = cast(module.params[arg])
else:
obj[arg] = None
return obj
def main():
""" main entry point for module execution
"""
argument_spec = dict(
hostname=dict(),
domain_lookup=dict(type='bool'),
# { name: <str>, vrf: <str> }
domain_name=dict(type='list'),
# {name: <str>, vrf: <str> }
domain_search=dict(type='list'),
# { server: <str>; vrf: <str> }
name_servers=dict(type='list'),
system_mtu=dict(type='int'),
state=dict(default='present', choices=['present', 'absent'])
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
warnings = list()
check_args(module, warnings)
result = {'changed': False}
if warnings:
result['warnings'] = warnings
want = map_params_to_obj(module)
have = map_config_to_obj(module)
commands = map_obj_to_commands(want, have, module)
result['commands'] = commands
if commands:
if not module.check_mode:
load_config(module, commands)
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
leopoul/ncclient | examples/juniper/edit-config-jnpr-text.py | 6 | 1488 | #!/usr/bin/env python
import logging
import sys
from ncclient import manager
def connect(host, port, user, password):
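    # Overall flow exercised by this example (see the calls below): lock the
    # candidate configuration, load snippets in Junos text (curly-brace)
    # format, validate, show the diff against the running config, commit,
    # then unlock and close the session.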
conn = manager.connect(host=host,
port=port,
username=user,
password=password,
timeout=60,
device_params={'name': 'junos'},
hostkey_verify=False)
conn.lock()
new_host_name = 'foo-bar'
# configuration as a text string
location = """
system {
location {
building "Main Campus, E";
floor 15;
rack 1117;
}
}
"""
load_config_result = conn.load_configuration(format='text', config=location)
logging.info(load_config_result)
# configuration as an argument
load_config_result = conn.load_configuration(format='text', config="""
system {
host-name %s;
}
""" % new_host_name)
logging.info(load_config_result)
validate_result = conn.validate()
logging.info(validate_result)
compare_config_result = conn.compare_configuration()
logging.info(compare_config_result)
conn.commit()
conn.unlock()
conn.close_session()
if __name__ == '__main__':
LOG_FORMAT = '%(asctime)s %(levelname)s %(filename)s:%(lineno)d %(message)s'
logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=LOG_FORMAT)
connect('router', '22', 'netconf', 'juniper!')
| apache-2.0 |
mobify/dj-saml-idp | idptest/scripts/c14n.py | 2 | 1095 | #!/usr/bin/python
from StringIO import StringIO
from lxml import etree
def c14n(src):
f = StringIO(src)
tree = etree.parse(f)
f2 = StringIO()
tree.write_c14n(f2)
return f2.getvalue().decode("utf-8")
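# Illustrative example (not part of the original script): canonicalization
# normalizes details such as attribute ordering and empty-element syntax, so
# c14n('<root><child/></root>') yields '<root><child></child></root>'.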
if __name__=="__main__":
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--fix",
action="store_true", dest="fix", default=False,
help="Fix file(s) by overwriting original with canonicalized XML.")
(options, args) = parser.parse_args()
if len(args) < 1:
print "c14n - Canonicalize an XML file to stdout"
print "Usage: c14n [--fix] FILENAMES"
else:
for filename in args:
# print 'Processing ' + filename + '...'
f = open(filename, "r")
data = f.read()
f.close()
if options.fix:
fixed = c14n(data)
g = open(filename, "w")
g.write(fixed)
g.close()
print "Fixed " + filename
else:
print c14n(data)
| mit |
servo/servo | tests/wpt/web-platform-tests/tools/third_party/pywebsocket3/test/test_endtoend.py | 12 | 27179 | #!/usr/bin/env python
#
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""End-to-end tests for pywebsocket. Tests standalone.py.
"""
from __future__ import absolute_import
from six.moves import urllib
import locale
import logging
import os
import signal
import socket
import subprocess
import sys
import time
import unittest
import set_sys_path # Update sys.path to locate mod_pywebsocket module.
from test import client_for_testing
# Special message that tells the echo server to start closing handshake
_GOODBYE_MESSAGE = 'Goodbye'
_SERVER_WARMUP_IN_SEC = 0.2
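# The helpers below are "check procedures": each one drives a single test
# client through a connect / send / assert-receive / close sequence and is
# handed to the test harness (e.g. _run_test), which starts and stops the
# standalone server around it.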
# Test body functions
def _echo_check_procedure(client):
client.connect()
client.send_message('test')
client.assert_receive('test')
client.send_message('helloworld')
client.assert_receive('helloworld')
client.send_close()
client.assert_receive_close()
client.assert_connection_closed()
def _echo_check_procedure_with_binary(client):
client.connect()
client.send_message(b'binary', binary=True)
client.assert_receive(b'binary', binary=True)
client.send_message(b'\x00\x80\xfe\xff\x00\x80', binary=True)
client.assert_receive(b'\x00\x80\xfe\xff\x00\x80', binary=True)
client.send_close()
client.assert_receive_close()
client.assert_connection_closed()
def _echo_check_procedure_with_goodbye(client):
client.connect()
client.send_message('test')
client.assert_receive('test')
client.send_message(_GOODBYE_MESSAGE)
client.assert_receive(_GOODBYE_MESSAGE)
client.assert_receive_close()
client.send_close()
client.assert_connection_closed()
def _echo_check_procedure_with_code_and_reason(client, code, reason):
client.connect()
client.send_close(code, reason)
client.assert_receive_close(code, reason)
client.assert_connection_closed()
def _unmasked_frame_check_procedure(client):
client.connect()
client.send_message('test', mask=False)
client.assert_receive_close(client_for_testing.STATUS_PROTOCOL_ERROR, '')
client.assert_connection_closed()
def _check_handshake_with_basic_auth(client):
client.connect()
client.send_message(_GOODBYE_MESSAGE)
client.assert_receive(_GOODBYE_MESSAGE)
client.assert_receive_close()
client.send_close()
client.assert_connection_closed()
class EndToEndTestBase(unittest.TestCase):
"""Base class for end-to-end tests that launch pywebsocket standalone
server as a separate process, connect to it using the client_for_testing
module, and check if the server behaves correctly by exchanging opening
handshake and frames over a TCP connection.
"""
def setUp(self):
self.server_stderr = None
self.top_dir = os.path.join(os.path.dirname(__file__), '..')
os.putenv('PYTHONPATH', os.path.pathsep.join(sys.path))
self.standalone_command = os.path.join(self.top_dir, 'mod_pywebsocket',
'standalone.py')
self.document_root = os.path.join(self.top_dir, 'example')
s = socket.socket()
s.bind(('localhost', 0))
(_, self.test_port) = s.getsockname()
s.close()
self._options = client_for_testing.ClientOptions()
self._options.server_host = 'localhost'
self._options.origin = 'http://localhost'
self._options.resource = '/echo'
self._options.server_port = self.test_port
# TODO(tyoshino): Use tearDown to kill the server.
def _run_python_command(self, commandline, stdout=None, stderr=None):
close_fds = True if sys.platform != 'win32' else None
return subprocess.Popen([sys.executable] + commandline,
close_fds=close_fds,
stdout=stdout,
stderr=stderr)
def _run_server(self, extra_args=[]):
args = [
self.standalone_command, '-H', 'localhost', '-V', 'localhost',
'-p',
str(self.test_port), '-P',
str(self.test_port), '-d', self.document_root
]
# Inherit the level set to the root logger by test runner.
root_logger = logging.getLogger()
log_level = root_logger.getEffectiveLevel()
if log_level != logging.NOTSET:
args.append('--log-level')
args.append(logging.getLevelName(log_level).lower())
args += extra_args
return self._run_python_command(args, stderr=self.server_stderr)
def _close_server(self, server):
"""
This method mimics Popen.__exit__ to gracefully kill the server process.
        Its main purpose is to maintain compatibility between python 2 and 3,
        since Popen in python 2 does not have an __exit__ attribute.
"""
server.kill()
if server.stdout:
server.stdout.close()
if server.stderr:
server.stderr.close()
if server.stdin:
server.stdin.close()
server.wait()
class EndToEndHyBiTest(EndToEndTestBase):
def setUp(self):
EndToEndTestBase.setUp(self)
def _run_test_with_options(self,
test_function,
options,
server_options=[]):
server = self._run_server(server_options)
try:
# TODO(tyoshino): add some logic to poll the server until it
# becomes ready
time.sleep(_SERVER_WARMUP_IN_SEC)
client = client_for_testing.create_client(options)
try:
test_function(client)
finally:
client.close_socket()
finally:
self._close_server(server)
def _run_test(self, test_function):
self._run_test_with_options(test_function, self._options)
def _run_permessage_deflate_test(self, offer, response_checker,
test_function):
server = self._run_server()
try:
time.sleep(_SERVER_WARMUP_IN_SEC)
self._options.extensions += offer
self._options.check_permessage_deflate = response_checker
client = client_for_testing.create_client(self._options)
try:
client.connect()
if test_function is not None:
test_function(client)
client.assert_connection_closed()
finally:
client.close_socket()
finally:
self._close_server(server)
def _run_close_with_code_and_reason_test(self,
test_function,
code,
reason,
server_options=[]):
server = self._run_server()
try:
time.sleep(_SERVER_WARMUP_IN_SEC)
client = client_for_testing.create_client(self._options)
try:
test_function(client, code, reason)
finally:
client.close_socket()
finally:
self._close_server(server)
def _run_http_fallback_test(self, options, status):
server = self._run_server()
try:
time.sleep(_SERVER_WARMUP_IN_SEC)
client = client_for_testing.create_client(options)
try:
client.connect()
self.fail('Could not catch HttpStatusException')
except client_for_testing.HttpStatusException as e:
self.assertEqual(status, e.status)
except Exception as e:
                self.fail('Caught unexpected exception: %r' % e)
finally:
client.close_socket()
finally:
self._close_server(server)
def test_echo(self):
self._run_test(_echo_check_procedure)
def test_echo_binary(self):
self._run_test(_echo_check_procedure_with_binary)
def test_echo_server_close(self):
self._run_test(_echo_check_procedure_with_goodbye)
def test_unmasked_frame(self):
self._run_test(_unmasked_frame_check_procedure)
def test_echo_permessage_deflate(self):
def test_function(client):
# From the examples in the spec.
compressed_hello = b'\xf2\x48\xcd\xc9\xc9\x07\x00'
client._stream.send_data(compressed_hello,
client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.assert_receive_binary(
compressed_hello,
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
self.assertEqual('permessage-deflate', parameter.name())
self.assertEqual([], parameter.get_parameters())
self._run_permessage_deflate_test(['permessage-deflate'],
response_checker, test_function)
def test_echo_permessage_deflate_two_frames(self):
def test_function(client):
# From the examples in the spec.
client._stream.send_data(b'\xf2\x48\xcd',
client_for_testing.OPCODE_TEXT,
end=False,
rsv1=1)
client._stream.send_data(b'\xc9\xc9\x07\x00',
client_for_testing.OPCODE_TEXT)
client._stream.assert_receive_binary(
b'\xf2\x48\xcd\xc9\xc9\x07\x00',
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
self.assertEqual('permessage-deflate', parameter.name())
self.assertEqual([], parameter.get_parameters())
self._run_permessage_deflate_test(['permessage-deflate'],
response_checker, test_function)
def test_echo_permessage_deflate_two_messages(self):
def test_function(client):
# From the examples in the spec.
client._stream.send_data(b'\xf2\x48\xcd\xc9\xc9\x07\x00',
client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.send_data(b'\xf2\x00\x11\x00\x00',
client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.assert_receive_binary(
b'\xf2\x48\xcd\xc9\xc9\x07\x00',
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.assert_receive_binary(
b'\xf2\x00\x11\x00\x00',
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
self.assertEqual('permessage-deflate', parameter.name())
self.assertEqual([], parameter.get_parameters())
self._run_permessage_deflate_test(['permessage-deflate'],
response_checker, test_function)
def test_echo_permessage_deflate_two_msgs_server_no_context_takeover(self):
def test_function(client):
# From the examples in the spec.
client._stream.send_data(b'\xf2\x48\xcd\xc9\xc9\x07\x00',
client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.send_data(b'\xf2\x00\x11\x00\x00',
client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.assert_receive_binary(
b'\xf2\x48\xcd\xc9\xc9\x07\x00',
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.assert_receive_binary(
b'\xf2\x48\xcd\xc9\xc9\x07\x00',
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
self.assertEqual('permessage-deflate', parameter.name())
self.assertEqual([('server_no_context_takeover', None)],
parameter.get_parameters())
self._run_permessage_deflate_test(
['permessage-deflate; server_no_context_takeover'],
response_checker, test_function)
def test_echo_permessage_deflate_preference(self):
def test_function(client):
# From the examples in the spec.
compressed_hello = b'\xf2\x48\xcd\xc9\xc9\x07\x00'
client._stream.send_data(compressed_hello,
client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.assert_receive_binary(
compressed_hello,
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
self.assertEqual('permessage-deflate', parameter.name())
self.assertEqual([], parameter.get_parameters())
self._run_permessage_deflate_test(
['permessage-deflate', 'deflate-frame'], response_checker,
test_function)
def test_echo_permessage_deflate_with_parameters(self):
def test_function(client):
# From the examples in the spec.
compressed_hello = b'\xf2\x48\xcd\xc9\xc9\x07\x00'
client._stream.send_data(compressed_hello,
client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.assert_receive_binary(
compressed_hello,
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
self.assertEqual('permessage-deflate', parameter.name())
self.assertEqual([('server_max_window_bits', '10'),
('server_no_context_takeover', None)],
parameter.get_parameters())
self._run_permessage_deflate_test([
'permessage-deflate; server_max_window_bits=10; '
'server_no_context_takeover'
], response_checker, test_function)
def test_echo_permessage_deflate_with_bad_server_max_window_bits(self):
def test_function(client):
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
raise Exception('Unexpected acceptance of permessage-deflate')
self._run_permessage_deflate_test(
['permessage-deflate; server_max_window_bits=3000000'],
response_checker, test_function)
def test_echo_permessage_deflate_with_undefined_parameter(self):
def test_function(client):
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
raise Exception('Unexpected acceptance of permessage-deflate')
self._run_permessage_deflate_test(['permessage-deflate; foo=bar'],
response_checker, test_function)
def test_echo_close_with_code_and_reason(self):
self._options.resource = '/close'
self._run_close_with_code_and_reason_test(
_echo_check_procedure_with_code_and_reason, 3333, 'sunsunsunsun')
def test_echo_close_with_empty_body(self):
self._options.resource = '/close'
self._run_close_with_code_and_reason_test(
_echo_check_procedure_with_code_and_reason, None, '')
def test_close_on_protocol_error(self):
"""Tests that the server sends a close frame with protocol error status
code when the client sends data with some protocol error.
"""
def test_function(client):
client.connect()
# Intermediate frame without any preceding start of fragmentation
# frame.
client.send_frame_of_arbitrary_bytes(b'\x80\x80', '')
client.assert_receive_close(
client_for_testing.STATUS_PROTOCOL_ERROR)
self._run_test(test_function)
def test_close_on_unsupported_frame(self):
"""Tests that the server sends a close frame with unsupported operation
status code when the client sends data asking some operation that is
not supported by the server.
"""
def test_function(client):
client.connect()
# Text frame with RSV3 bit raised.
client.send_frame_of_arbitrary_bytes(b'\x91\x80', '')
client.assert_receive_close(
client_for_testing.STATUS_UNSUPPORTED_DATA)
self._run_test(test_function)
def test_close_on_invalid_frame(self):
"""Tests that the server sends a close frame with invalid frame payload
data status code when the client sends an invalid frame like containing
invalid UTF-8 character.
"""
def test_function(client):
client.connect()
# Text frame with invalid UTF-8 string.
client.send_message(b'\x80', raw=True)
client.assert_receive_close(
client_for_testing.STATUS_INVALID_FRAME_PAYLOAD_DATA)
self._run_test(test_function)
def test_close_on_internal_endpoint_error(self):
"""Tests that the server sends a close frame with internal endpoint
error status code when the handler does bad operation.
"""
self._options.resource = '/internal_error'
def test_function(client):
client.connect()
client.assert_receive_close(
client_for_testing.STATUS_INTERNAL_ENDPOINT_ERROR)
self._run_test(test_function)
def test_absolute_uri(self):
"""Tests absolute uri request."""
options = self._options
options.resource = 'ws://localhost:%d/echo' % options.server_port
self._run_test_with_options(_echo_check_procedure, options)
def test_invalid_absolute_uri(self):
"""Tests invalid absolute uri request."""
options = self._options
options.resource = 'ws://invalidlocalhost:%d/echo' % options.server_port
options.server_stderr = subprocess.PIPE
self._run_http_fallback_test(options, 404)
def test_origin_check(self):
"""Tests http fallback on origin check fail."""
options = self._options
options.resource = '/origin_check'
# Server shows warning message for http 403 fallback. This warning
# message is confusing. Following pipe disposes warning messages.
self.server_stderr = subprocess.PIPE
self._run_http_fallback_test(options, 403)
def test_invalid_resource(self):
"""Tests invalid resource path."""
options = self._options
options.resource = '/no_resource'
self.server_stderr = subprocess.PIPE
self._run_http_fallback_test(options, 404)
def test_fragmentized_resource(self):
"""Tests resource name with fragment"""
options = self._options
options.resource = '/echo#fragment'
self.server_stderr = subprocess.PIPE
self._run_http_fallback_test(options, 400)
def test_version_check(self):
"""Tests http fallback on version check fail."""
options = self._options
options.version = 99
self._run_http_fallback_test(options, 400)
def test_basic_auth_connection(self):
"""Test successful basic auth"""
options = self._options
options.use_basic_auth = True
self.server_stderr = subprocess.PIPE
self._run_test_with_options(_check_handshake_with_basic_auth,
options,
server_options=['--basic-auth'])
def test_invalid_basic_auth_connection(self):
"""Tests basic auth with invalid credentials"""
options = self._options
options.use_basic_auth = True
options.basic_auth_credential = 'invalid:test'
self.server_stderr = subprocess.PIPE
with self.assertRaises(client_for_testing.HttpStatusException) as e:
self._run_test_with_options(_check_handshake_with_basic_auth,
options,
server_options=['--basic-auth'])
        self.assertEqual(401, e.exception.status)
class EndToEndTestWithEchoClient(EndToEndTestBase):
def setUp(self):
EndToEndTestBase.setUp(self)
def _check_example_echo_client_result(self, expected, stdoutdata,
stderrdata):
actual = stdoutdata.decode(locale.getpreferredencoding())
# In Python 3 on Windows we get "\r\n" terminators back from
# the subprocess and we need to replace them with "\n" to get
# a match. This is a bit of a hack, but avoids platform- and
# version- specific code.
actual = actual.replace('\r\n', '\n')
if actual != expected:
raise Exception('Unexpected result on example echo client: '
'%r (expected) vs %r (actual)' %
(expected, actual))
if stderrdata is not None:
raise Exception('Unexpected error message on example echo '
'client: %r' % stderrdata)
def test_example_echo_client(self):
"""Tests that the echo_client.py example can talk with the server."""
server = self._run_server()
try:
time.sleep(_SERVER_WARMUP_IN_SEC)
client_command = os.path.join(self.top_dir, 'example',
'echo_client.py')
# Expected output for the default messages.
default_expectation = (u'Send: Hello\n'
u'Recv: Hello\n'
u'Send: <>\n'
u'Recv: <>\n'
u'Send close\n'
u'Recv ack\n')
args = [client_command, '-p', str(self._options.server_port)]
client = self._run_python_command(args, stdout=subprocess.PIPE)
stdoutdata, stderrdata = client.communicate()
self._check_example_echo_client_result(default_expectation,
stdoutdata, stderrdata)
# Process a big message for which extended payload length is used.
# To handle extended payload length, ws_version attribute will be
# accessed. This test checks that ws_version is correctly set.
big_message = 'a' * 1024
args = [
client_command, '-p',
str(self._options.server_port), '-m', big_message
]
client = self._run_python_command(args, stdout=subprocess.PIPE)
stdoutdata, stderrdata = client.communicate()
expected = ('Send: %s\nRecv: %s\nSend close\nRecv ack\n' %
(big_message, big_message))
self._check_example_echo_client_result(expected, stdoutdata,
stderrdata)
# Test the permessage-deflate extension.
args = [
client_command, '-p',
str(self._options.server_port), '--use_permessage_deflate'
]
client = self._run_python_command(args, stdout=subprocess.PIPE)
stdoutdata, stderrdata = client.communicate()
self._check_example_echo_client_result(default_expectation,
stdoutdata, stderrdata)
finally:
self._close_server(server)
class EndToEndTestWithCgi(EndToEndTestBase):
def setUp(self):
EndToEndTestBase.setUp(self)
def test_cgi(self):
"""Verifies that CGI scripts work."""
server = self._run_server(extra_args=['--cgi-paths', '/cgi-bin'])
time.sleep(_SERVER_WARMUP_IN_SEC)
url = 'http://localhost:%d/cgi-bin/hi.py' % self._options.server_port
# urlopen() in Python 2.7 doesn't support "with".
try:
f = urllib.request.urlopen(url)
except:
self._close_server(server)
raise
try:
self.assertEqual(f.getcode(), 200)
self.assertEqual(f.info().get('Content-Type'), 'text/plain')
body = f.read()
self.assertEqual(body.rstrip(b'\r\n'), b'Hi from hi.py')
finally:
f.close()
self._close_server(server)
if __name__ == '__main__':
unittest.main()
# vi:sts=4 sw=4 et
| mpl-2.0 |
kuba1/qtcreator | tests/system/suite_CSUP/tst_CSUP04/test.py | 5 | 4135 | #############################################################################
##
## Copyright (C) 2015 The Qt Company Ltd.
## Contact: http://www.qt.io/licensing
##
## This file is part of Qt Creator.
##
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms and
## conditions see http://www.qt.io/terms-conditions. For further information
## use the contact form at http://www.qt.io/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 or version 3 as published by the Free
## Software Foundation and appearing in the file LICENSE.LGPLv21 and
## LICENSE.LGPLv3 included in the packaging of this file. Please review the
## following information to ensure the GNU Lesser General Public License
## requirements will be met: https://www.gnu.org/licenses/lgpl.html and
## http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, The Qt Company gives you certain additional
## rights. These rights are described in The Qt Company LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
#############################################################################
source("../../shared/qtcreator.py")
# entry of test
def main():
# prepare example project
sourceExample = os.path.abspath(sdkPath + "/Examples/4.7/declarative/animation/basics/property-animation")
proFile = "propertyanimation.pro"
if not neededFilePresent(os.path.join(sourceExample, proFile)):
return
# copy example project to temp directory
templateDir = prepareTemplate(sourceExample)
examplePath = os.path.join(templateDir, proFile)
startCreatorTryingClang()
if not startedWithoutPluginError():
return
# open example project
openQmakeProject(examplePath)
# wait for parsing to complete
progressBarWait(30000)
models = iterateAvailableCodeModels()
for current in models:
if current != models[0]:
selectCodeModel(current)
test.log("Testing code model: %s" % current)
# open .cpp file in editor
if not openDocument("propertyanimation.Sources.main\\.cpp"):
test.fatal("Could not open main.cpp")
invokeMenuItem("File", "Exit")
return
test.verify(checkIfObjectExists(":Qt Creator_CppEditor::Internal::CPPEditorWidget"),
"Verifying if: .cpp file is opened in Edit mode.")
# place cursor on line "QmlApplicationViewer viewer;"
editorWidget = findObject(":Qt Creator_CppEditor::Internal::CPPEditorWidget")
# invoke find usages from context menu on word "viewer"
if not invokeFindUsage(editorWidget, "QmlApplicationViewer viewer;", "<Left>", 10):
invokeMenuItem("File", "Exit")
return
# wait until search finished and verify search results
waitForSearchResults()
validateSearchResult(14)
result = re.search("QmlApplicationViewer", str(editorWidget.plainText))
test.verify(result, "Verifying if: The list of all usages of the selected text is displayed in Search Results. "
"File with used text is opened.")
# move cursor to the other word and test Find Usages function by pressing Ctrl+Shift+U.
openDocument("propertyanimation.Sources.main\\.cpp")
if not placeCursorToLine(editorWidget, "viewer.setOrientation(QmlApplicationViewer::ScreenOrientationAuto);"):
return
for i in range(4):
type(editorWidget, "<Left>")
type(editorWidget, "<Ctrl+Shift+u>")
# wait until search finished and verify search results
waitForSearchResults()
validateSearchResult(3)
invokeMenuItem("File", "Close All")
invokeMenuItem("File", "Exit")
| lgpl-2.1 |
juliojsb/sarviewer | plotters/matplotlib/swap.py | 1 | 2062 | #!/usr/bin/env python2
"""
Author :Julio Sanz
Website :www.elarraydejota.com
Email :[email protected]
Description :Script to create a graph about swap usage
Dependencies :Python 2.x, matplotlib
Usage :python swap.py
License :GPLv3
"""
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import csv
from datetime import datetime
import matplotlib.dates
# ======================
# VARIABLES
# ======================
# Aesthetic parameters
plt.rcParams.update({'font.size': 8})
plt.rcParams['lines.linewidth'] = 1.5
time_format = matplotlib.dates.DateFormatter('%H:%M:%S')
plt.gca().xaxis.set_major_formatter(time_format)
plt.gcf().autofmt_xdate()
# Time (column 0)
x = []
# Data arrays
swap_free = []
swap_used = []
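# Expected input (assumption based on the parsing below): ../../data/swap.dat
# holds space-separated rows of "HH:MM:SS kbswpfree kbswpused ...", e.g. as
# written by a sar-based collector; values are converted from KB to MB before
# plotting.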
# ======================
# FUNCTIONS
# ======================
def generate_graph():
with open('../../data/swap.dat', 'r') as csvfile:
data_source = csv.reader(csvfile, delimiter=' ', skipinitialspace=True)
for row in data_source:
# [0] column is a time column
# Convert to datetime data type
a = datetime.strptime((row[0]),'%H:%M:%S')
x.append((a))
# The remaining columns contain data
swap_free.append(str(int(row[1])/1024))
swap_used.append(str(int(row[2])/1024))
# Plot lines
plt.plot(x,swap_used, label='Used', color='r', antialiased=True)
plt.plot(x,swap_free, label='Free', color='g', antialiased=True)
# Graph properties
plt.xlabel('Time',fontstyle='italic')
plt.ylabel('SWAP (MB)',fontstyle='italic')
plt.title('SWAP usage graph')
plt.grid(linewidth=0.4, antialiased=True)
plt.legend(loc='upper center', bbox_to_anchor=(0.5, -0.15), ncol=2, fancybox=True, shadow=True)
plt.autoscale(True)
# Graph saved to PNG file
plt.savefig('../../graphs/swap.png', bbox_inches='tight')
#plt.show()
# ======================
# MAIN
# ======================
if __name__ == '__main__':
generate_graph() | gpl-3.0 |
kirca/OpenUpgrade | addons/account_analytic_plans/wizard/analytic_plan_create_model.py | 384 | 2829 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
class analytic_plan_create_model(osv.osv_memory):
_name = "analytic.plan.create.model"
_description = "analytic.plan.create.model"
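    # activate() is meant to be launched from an account.analytic.plan.instance
    # record: it checks the instance has both a name and a code, attaches it to
    # the first analytic plan defined, and re-opens this wizard's confirmation
    # view ("Distribution Model Saved").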
def activate(self, cr, uid, ids, context=None):
plan_obj = self.pool.get('account.analytic.plan.instance')
mod_obj = self.pool.get('ir.model.data')
anlytic_plan_obj = self.pool.get('account.analytic.plan')
if context is None:
context = {}
if 'active_id' in context and context['active_id']:
plan = plan_obj.browse(cr, uid, context['active_id'], context=context)
if (not plan.name) or (not plan.code):
raise osv.except_osv(_('Error!'), _('Please put a name and a code before saving the model.'))
pids = anlytic_plan_obj.search(cr, uid, [], context=context)
if not pids:
raise osv.except_osv(_('Error!'), _('There is no analytic plan defined.'))
plan_obj.write(cr, uid, [context['active_id']], {'plan_id':pids[0]}, context=context)
model_data_ids = mod_obj.search(cr, uid, [('model', '=', 'ir.ui.view'),('name', '=', 'view_analytic_plan_create_model')], context=context)
resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id']
return {
'name': _('Distribution Model Saved'),
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'analytic.plan.create.model',
'views': [(resource_id,'form')],
'type': 'ir.actions.act_window',
'target': 'new',
}
else:
return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
kaynfiretvguru/Eldritch | plugin.program.echowizard/resources/lib/log/uploaders/dropbox.py | 6 | 3174 | """
TVAddons Log Uploader Script
Copyright (C) 2016 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import time
import uploader
from uploader import UploaderError
from .. import dropbox_api
from .. import db_auth
from ..db_auth import AUTH_URL
from .. import kodi
from ..kodi import i18n
from .. import log_utils
APP_KEY = '6943gzynff6zkcz'
APP_SECRET = 'fp8d96951grzf78'
INTERVALS = 5
class DropboxUploader(uploader.Uploader):
name = 'dropbox'
def upload_log(self, log, name=None):
if name is None: name = 'kodi.log'
token = kodi.get_setting('dropbox_token')
if not token:
token = self.__authorize()
try:
if token:
full_path = '/%s' % (name)
db = dropbox_api.DropboxClient(token)
db.upload_file(full_path, log, overwrite=True)
res = db.share(full_path)
return res.get('url')
except dropbox_api.ErrorResponse as e:
raise UploaderError('Upload Failed: (%s): %s' % (e.status, e.reason))
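    # Authorization helper: starts a Dropbox OAuth2 flow, shows the user a PIN
    # together with the TVAddons AUTH_URL in a countdown dialog, and polls
    # __check_auth() until the PIN has been linked; the auth code is then
    # exchanged for an access token cached in the 'dropbox_token' setting.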
def __authorize(self):
auth_flow = dropbox_api.DropboxOAuth2Flow(APP_KEY, APP_SECRET)
authorize_url = auth_flow.start()
with db_auth.DbAuth(authorize_url) as auth:
result = auth.start_session()
if result is None:
raise UploaderError('Unable to start db auth session')
else:
pin, redirect_uri = result
line1 = i18n('dropbox_visit') % (AUTH_URL)
line2 = i18n('dropbox_pin') % (pin)
line3 = i18n('directions')
with kodi.CountdownDialog(i18n('dropbox_auth'), line1=line1, line2=line2, line3=line3) as cd:
token = cd.start(self.__check_auth, [pin, auth, auth_flow, redirect_uri])
if token:
return token
raise UploaderError('Authorization Time Out')
def __check_auth(self, pin, auth, auth_flow, redirect_uri):
result = auth.get_code(pin)
if result.get('success') and result.get('auth_code'):
try:
access_token, _user_id = auth_flow.finish(result['auth_code'], redirect_uri)
kodi.set_setting('dropbox_token', access_token)
return access_token
except dropbox_api.ErrorResponse as e:
raise UploaderError('Authorization Failed (%s): %s' % (e.status, e.reason))
def send_email(self, email, results):
return None
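# --- Illustrative usage sketch (not part of the original module) ---
# Shows how this uploader might be driven once imported. It assumes that
# DropboxUploader() can be constructed without arguments and that a
# 'dropbox_token' Kodi setting already exists (or that the interactive
# authorization dialog is completed); the log path below is hypothetical.
#
#   uploader = DropboxUploader()
#   try:
#       with open('/path/to/kodi.log') as f:
#           url = uploader.upload_log(f.read(), name='kodi.log')
#       print('Log shared at: %s' % url)
#   except UploaderError as e:
#       print('Upload failed: %s' % e)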
| gpl-2.0 |
ketjow4/NOV | Lib/site-packages/numpy/distutils/command/build_scripts.py | 100 | 1665 | """ Modified version of build_scripts that handles building scripts from functions.
"""
from distutils.command.build_scripts import build_scripts as old_build_scripts
from numpy.distutils import log
from numpy.distutils.misc_util import is_string
class build_scripts(old_build_scripts):
def generate_scripts(self, scripts):
new_scripts = []
func_scripts = []
for script in scripts:
if is_string(script):
new_scripts.append(script)
else:
func_scripts.append(script)
if not func_scripts:
return new_scripts
build_dir = self.build_dir
self.mkpath(build_dir)
for func in func_scripts:
script = func(build_dir)
if not script:
continue
if is_string(script):
log.info(" adding '%s' to scripts" % (script,))
new_scripts.append(script)
else:
[log.info(" adding '%s' to scripts" % (s,)) for s in script]
new_scripts.extend(list(script))
return new_scripts
def run (self):
if not self.scripts:
return
self.scripts = self.generate_scripts(self.scripts)
# Now make sure that the distribution object has this list of scripts.
# setuptools' develop command requires that this be a list of filenames,
# not functions.
self.distribution.scripts = self.scripts
return old_build_scripts.run(self)
def get_source_files(self):
from numpy.distutils.misc_util import get_script_files
return get_script_files(self.scripts)
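# --- Illustrative sketch (not part of the original module) ---
# generate_scripts() accepts callables alongside plain filenames: each
# callable is invoked with the build directory and should return the path
# of the script it wrote (or a list of paths), or a false value to skip it.
# A minimal script-generating function might look like the hypothetical
# example below; the file name and contents are assumptions.
#
#   import os
#
#   def make_hello_script(build_dir):
#       # Write a trivial script into the build directory and return its path.
#       path = os.path.join(build_dir, 'hello.py')
#       with open(path, 'w') as f:
#           f.write('#!/usr/bin/env python\nprint("hello")\n')
#       return path
#
# Passing [make_hello_script, 'existing_script.py'] as `scripts` would then
# yield both entries in the generated script list.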
| gpl-3.0 |
maxsocl/django | tests/template_tests/utils.py | 36 | 4915 | # coding: utf-8
from __future__ import unicode_literals
import functools
import os
from django import template
from django.template import Library
from django.template.base import libraries
from django.template.engine import Engine
from django.test.utils import override_settings
from django.utils._os import upath
from django.utils.encoding import python_2_unicode_compatible
from django.utils.safestring import mark_safe
ROOT = os.path.dirname(os.path.abspath(upath(__file__)))
TEMPLATE_DIR = os.path.join(ROOT, 'templates')
def setup(templates, *args, **kwargs):
"""
Runs test method multiple times in the following order:
debug  cached  string_if_invalid
-----  ------  -----------------
False  False
False  True
False  False   INVALID
False  True    INVALID
True   False
True   True
"""
# when testing deprecation warnings, it's useful to run just one test since
# the message won't be displayed multiple times
test_once = kwargs.get('test_once', False)
for arg in args:
templates.update(arg)
# numerous tests make use of an inclusion tag
# add this in here for simplicity
templates["inclusion.html"] = "{{ result }}"
loaders = [
('django.template.loaders.cached.Loader', [
('django.template.loaders.locmem.Loader', templates),
]),
]
def decorator(func):
@register_test_tags
# Make Engine.get_default() raise an exception to ensure that tests
# are properly isolated from Django's global settings.
@override_settings(TEMPLATES=None)
@functools.wraps(func)
def inner(self):
self.engine = Engine(
allowed_include_roots=[ROOT],
loaders=loaders,
)
func(self)
if test_once:
return
func(self)
self.engine = Engine(
allowed_include_roots=[ROOT],
loaders=loaders,
string_if_invalid='INVALID',
)
func(self)
func(self)
self.engine = Engine(
allowed_include_roots=[ROOT],
debug=True,
loaders=loaders,
)
func(self)
func(self)
return inner
return decorator
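# --- Illustrative sketch (not part of the original module) ---
# A test method decorated with @setup runs once per engine configuration
# listed in the table above. The template name, contents and test class
# below are hypothetical; the body assumes the Engine.render_to_string()
# pattern used by the template syntax tests.
#
#   class ExampleTests(SimpleTestCase):
#
#       @setup({'example01': '{{ greeting }} world'})
#       def test_example01(self):
#           output = self.engine.render_to_string('example01', {'greeting': 'hello'})
#           self.assertEqual(output, 'hello world')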
# Custom template tag for tests
register = Library()
class EchoNode(template.Node):
def __init__(self, contents):
self.contents = contents
def render(self, context):
return ' '.join(self.contents)
@register.tag
def echo(parser, token):
return EchoNode(token.contents.split()[1:])
register.tag('other_echo', echo)
@register.filter
def upper(value):
return value.upper()
def register_test_tags(func):
@functools.wraps(func)
def inner(self):
libraries['testtags'] = register
try:
func(self)
finally:
del libraries['testtags']
return inner
# Helper objects
class SomeException(Exception):
silent_variable_failure = True
class SomeOtherException(Exception):
pass
class ShouldNotExecuteException(Exception):
pass
class SomeClass:
def __init__(self):
self.otherclass = OtherClass()
def method(self):
return 'SomeClass.method'
def method2(self, o):
return o
def method3(self):
raise SomeException
def method4(self):
raise SomeOtherException
def method5(self):
raise TypeError
def __getitem__(self, key):
if key == 'silent_fail_key':
raise SomeException
elif key == 'noisy_fail_key':
raise SomeOtherException
raise KeyError
@property
def silent_fail_attribute(self):
raise SomeException
@property
def noisy_fail_attribute(self):
raise SomeOtherException
@property
def attribute_error_attribute(self):
raise AttributeError
class OtherClass:
def method(self):
return 'OtherClass.method'
class TestObj(object):
def is_true(self):
return True
def is_false(self):
return False
def is_bad(self):
raise ShouldNotExecuteException()
class SilentGetItemClass(object):
def __getitem__(self, key):
raise SomeException
class SilentAttrClass(object):
def b(self):
raise SomeException
b = property(b)
@python_2_unicode_compatible
class UTF8Class:
"Class whose __str__ returns non-ASCII data on Python 2"
def __str__(self):
return 'ŠĐĆŽćžšđ'
# These two classes are used to test auto-escaping of unicode output.
@python_2_unicode_compatible
class UnsafeClass:
def __str__(self):
return 'you & me'
@python_2_unicode_compatible
class SafeClass:
def __str__(self):
return mark_safe('you > me')
| bsd-3-clause |
chenjun0210/tensorflow | tensorflow/contrib/tensor_forest/hybrid/python/models/decisions_to_data_then_nn.py | 190 | 1889 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A model that places a decision tree embedding before a neural net."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.tensor_forest.hybrid.python import hybrid_model
from tensorflow.contrib.tensor_forest.hybrid.python.layers import decisions_to_data
from tensorflow.contrib.tensor_forest.hybrid.python.layers import fully_connected
from tensorflow.python.training import adagrad
class DecisionsToDataThenNN(hybrid_model.HybridModel):
"""A model that places a decision tree embedding before a neural net."""
def __init__(self,
params,
device_assigner=None,
optimizer_class=adagrad.AdagradOptimizer,
**kwargs):
super(DecisionsToDataThenNN, self).__init__(
params,
device_assigner=device_assigner,
optimizer_class=optimizer_class,
**kwargs)
self.layers = [decisions_to_data.DecisionsToDataLayer(params,
0, device_assigner),
fully_connected.FullyConnectedLayer(
params, 1, device_assigner=device_assigner)]
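# --- Illustrative sketch (not part of the original module) ---
# The model is simply the two-stage pipeline declared above: a decision-tree
# embedding layer feeding a fully connected layer. Construction mirrors the
# other hybrid models; `hparams` below stands for whatever hybrid-model
# parameter object the caller already has (its required fields are not shown
# here and are an assumption).
#
#   model = DecisionsToDataThenNN(hparams)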
| apache-2.0 |
wubr2000/googleads-python-lib | examples/dfp/v201502/proposal_service/create_proposals.py | 4 | 2557 | #!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates new proposals.
To determine which proposals exist, run get_all_proposals.py."""
import uuid
# Import appropriate modules from the client library.
from googleads import dfp
ADVERTISER_ID = 'INSERT_ADVERTISER_ID_HERE'
PRIMARY_SALESPERSON_ID = 'INSERT_PRIMARY_SALESPERSON_ID_HERE'
SECONDARY_SALESPERSON_ID = 'INSERT_SECONDARY_SALESPERSON_ID_HERE'
PRIMARY_TRAFFICKER_ID = 'INSERT_PRIMARY_TRAFFICKER_ID_HERE'
def main(client, advertiser_id, primary_salesperson_id,
secondary_salesperson_id, primary_trafficker_id):
# Initialize appropriate services.
proposal_service = client.GetService('ProposalService', version='v201502')
network_service = client.GetService('NetworkService', version='v201502')
# Create proposal objects.
proposal = {
'name': 'Proposal #%s' % uuid.uuid4(),
'advertiser': {
'companyId': advertiser_id,
'type': 'ADVERTISER'
},
'primarySalesperson': {
'userId': primary_salesperson_id,
'split': '75000'
},
'secondarySalespeople': [{
'userId': secondary_salesperson_id,
'split': '25000'
}],
'primaryTraffickerId': primary_trafficker_id,
'probabilityOfClose': '100000',
'budget': {
'microAmount': '100000000',
'currencyCode': network_service.getCurrentNetwork()['currencyCode']
},
'billingCap': 'CAPPED_CUMULATIVE',
'billingSource': 'DFP_VOLUME'
}
# Add proposals.
proposals = proposal_service.createProposals([proposal])
# Display results.
for proposal in proposals:
print ('Proposal with id \'%s\' and name \'%s\' was created.'
% (proposal['id'], proposal['name']))
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client, ADVERTISER_ID, PRIMARY_SALESPERSON_ID,
SECONDARY_SALESPERSON_ID, PRIMARY_TRAFFICKER_ID)
| apache-2.0 |
ryfeus/lambda-packs | Sklearn_scipy_numpy/source/sklearn/feature_selection/rfe.py | 6 | 17502 | # Authors: Alexandre Gramfort <[email protected]>
# Vincent Michel <[email protected]>
# Gilles Louppe <[email protected]>
#
# License: BSD 3 clause
"""Recursive feature elimination for feature ranking"""
import warnings
import numpy as np
from ..utils import check_X_y, safe_sqr
from ..utils.metaestimators import if_delegate_has_method
from ..base import BaseEstimator
from ..base import MetaEstimatorMixin
from ..base import clone
from ..base import is_classifier
from ..cross_validation import check_cv
from ..cross_validation import _safe_split, _score
from ..metrics.scorer import check_scoring
from .base import SelectorMixin
class RFE(BaseEstimator, MetaEstimatorMixin, SelectorMixin):
"""Feature ranking with recursive feature elimination.
Given an external estimator that assigns weights to features (e.g., the
coefficients of a linear model), the goal of recursive feature elimination
(RFE) is to select features by recursively considering smaller and smaller
sets of features. First, the estimator is trained on the initial set of
features and weights are assigned to each one of them. Then, features whose
absolute weights are the smallest are pruned from the current set features.
That procedure is recursively repeated on the pruned set until the desired
number of features to select is eventually reached.
Read more in the :ref:`User Guide <rfe>`.
Parameters
----------
estimator : object
A supervised learning estimator with a `fit` method that updates a
`coef_` attribute that holds the fitted parameters. Important features
must correspond to high absolute values in the `coef_` array.
For instance, this is the case for most supervised learning
algorithms such as Support Vector Classifiers and Generalized
Linear Models from the `svm` and `linear_model` modules.
n_features_to_select : int or None (default=None)
The number of features to select. If `None`, half of the features
are selected.
step : int or float, optional (default=1)
If greater than or equal to 1, then `step` corresponds to the (integer)
number of features to remove at each iteration.
If within (0.0, 1.0), then `step` corresponds to the percentage
(rounded down) of features to remove at each iteration.
estimator_params : dict
Parameters for the external estimator.
This attribute is deprecated as of version 0.16 and will be removed in
0.18. Use estimator initialisation or set_params method instead.
verbose : int, default=0
Controls verbosity of output.
Attributes
----------
n_features_ : int
The number of selected features.
support_ : array of shape [n_features]
The mask of selected features.
ranking_ : array of shape [n_features]
The feature ranking, such that ``ranking_[i]`` corresponds to the
ranking position of the i-th feature. Selected (i.e., estimated
best) features are assigned rank 1.
estimator_ : object
The external estimator fit on the reduced dataset.
Examples
--------
The following example shows how to retrieve the 5 informative
features in the Friedman #1 dataset.
>>> from sklearn.datasets import make_friedman1
>>> from sklearn.feature_selection import RFE
>>> from sklearn.svm import SVR
>>> X, y = make_friedman1(n_samples=50, n_features=10, random_state=0)
>>> estimator = SVR(kernel="linear")
>>> selector = RFE(estimator, 5, step=1)
>>> selector = selector.fit(X, y)
>>> selector.support_ # doctest: +NORMALIZE_WHITESPACE
array([ True, True, True, True, True,
False, False, False, False, False], dtype=bool)
>>> selector.ranking_
array([1, 1, 1, 1, 1, 6, 4, 3, 2, 5])
References
----------
.. [1] Guyon, I., Weston, J., Barnhill, S., & Vapnik, V., "Gene selection
for cancer classification using support vector machines",
Mach. Learn., 46(1-3), 389--422, 2002.
"""
def __init__(self, estimator, n_features_to_select=None, step=1,
estimator_params=None, verbose=0):
self.estimator = estimator
self.n_features_to_select = n_features_to_select
self.step = step
self.estimator_params = estimator_params
self.verbose = verbose
@property
def _estimator_type(self):
return self.estimator._estimator_type
def fit(self, X, y):
"""Fit the RFE model and then the underlying estimator on the selected
features.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
The training input samples.
y : array-like, shape = [n_samples]
The target values.
"""
return self._fit(X, y)
def _fit(self, X, y, step_score=None):
X, y = check_X_y(X, y, "csc")
# Initialization
n_features = X.shape[1]
if self.n_features_to_select is None:
n_features_to_select = n_features // 2
else:
n_features_to_select = self.n_features_to_select
if 0.0 < self.step < 1.0:
step = int(max(1, self.step * n_features))
else:
step = int(self.step)
if step <= 0:
raise ValueError("Step must be >0")
if self.estimator_params is not None:
warnings.warn("The parameter 'estimator_params' is deprecated as "
"of version 0.16 and will be removed in 0.18. The "
"parameter is no longer necessary because the value "
"is set via the estimator initialisation or "
"set_params method.", DeprecationWarning)
support_ = np.ones(n_features, dtype=np.bool)
ranking_ = np.ones(n_features, dtype=np.int)
if step_score:
self.scores_ = []
# Elimination
while np.sum(support_) > n_features_to_select:
# Remaining features
features = np.arange(n_features)[support_]
# Rank the remaining features
estimator = clone(self.estimator)
if self.estimator_params:
estimator.set_params(**self.estimator_params)
if self.verbose > 0:
print("Fitting estimator with %d features." % np.sum(support_))
estimator.fit(X[:, features], y)
# Get coefs
if hasattr(estimator, 'coef_'):
coefs = estimator.coef_
elif hasattr(estimator, 'feature_importances_'):
coefs = estimator.feature_importances_
else:
raise RuntimeError('The classifier does not expose '
'"coef_" or "feature_importances_" '
'attributes')
# Get ranks
if coefs.ndim > 1:
ranks = np.argsort(safe_sqr(coefs).sum(axis=0))
else:
ranks = np.argsort(safe_sqr(coefs))
# for sparse case ranks is matrix
ranks = np.ravel(ranks)
# Eliminate the worst features
threshold = min(step, np.sum(support_) - n_features_to_select)
# Compute step score on the previous selection iteration
# because 'estimator' must use features
# that have not been eliminated yet
if step_score:
self.scores_.append(step_score(estimator, features))
support_[features[ranks][:threshold]] = False
ranking_[np.logical_not(support_)] += 1
# Set final attributes
features = np.arange(n_features)[support_]
self.estimator_ = clone(self.estimator)
if self.estimator_params:
self.estimator_.set_params(**self.estimator_params)
self.estimator_.fit(X[:, features], y)
# Compute step score when only n_features_to_select features left
if step_score:
self.scores_.append(step_score(self.estimator_, features))
self.n_features_ = support_.sum()
self.support_ = support_
self.ranking_ = ranking_
return self
@if_delegate_has_method(delegate='estimator')
def predict(self, X):
"""Reduce X to the selected features and then predict using the
underlying estimator.
Parameters
----------
X : array of shape [n_samples, n_features]
The input samples.
Returns
-------
y : array of shape [n_samples]
The predicted target values.
"""
return self.estimator_.predict(self.transform(X))
@if_delegate_has_method(delegate='estimator')
def score(self, X, y):
"""Reduce X to the selected features and then return the score of the
underlying estimator.
Parameters
----------
X : array of shape [n_samples, n_features]
The input samples.
y : array of shape [n_samples]
The target values.
"""
return self.estimator_.score(self.transform(X), y)
def _get_support_mask(self):
return self.support_
@if_delegate_has_method(delegate='estimator')
def decision_function(self, X):
return self.estimator_.decision_function(self.transform(X))
@if_delegate_has_method(delegate='estimator')
def predict_proba(self, X):
return self.estimator_.predict_proba(self.transform(X))
@if_delegate_has_method(delegate='estimator')
def predict_log_proba(self, X):
return self.estimator_.predict_log_proba(self.transform(X))
class RFECV(RFE, MetaEstimatorMixin):
"""Feature ranking with recursive feature elimination and cross-validated
selection of the best number of features.
Read more in the :ref:`User Guide <rfe>`.
Parameters
----------
estimator : object
A supervised learning estimator with a `fit` method that updates a
`coef_` attribute that holds the fitted parameters. Important features
must correspond to high absolute values in the `coef_` array.
For instance, this is the case for most supervised learning
algorithms such as Support Vector Classifiers and Generalized
Linear Models from the `svm` and `linear_model` modules.
step : int or float, optional (default=1)
If greater than or equal to 1, then `step` corresponds to the (integer)
number of features to remove at each iteration.
If within (0.0, 1.0), then `step` corresponds to the percentage
(rounded down) of features to remove at each iteration.
cv : int, cross-validation generator or an iterable, optional
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds.
- An object to be used as a cross-validation generator.
- An iterable yielding train/test splits.
For integer/None inputs, if ``y`` is binary or multiclass,
:class:`StratifiedKFold` used. If the estimator is a classifier
or if ``y`` is neither binary nor multiclass, :class:`KFold` is used.
Refer :ref:`User Guide <cross_validation>` for the various
cross-validation strategies that can be used here.
scoring : string, callable or None, optional, default: None
A string (see model evaluation documentation) or
a scorer callable object / function with signature
``scorer(estimator, X, y)``.
estimator_params : dict
Parameters for the external estimator.
This attribute is deprecated as of version 0.16 and will be removed in
0.18. Use estimator initialisation or set_params method instead.
verbose : int, default=0
Controls verbosity of output.
Attributes
----------
n_features_ : int
The number of selected features with cross-validation.
support_ : array of shape [n_features]
The mask of selected features.
ranking_ : array of shape [n_features]
The feature ranking, such that `ranking_[i]`
corresponds to the ranking
position of the i-th feature.
Selected (i.e., estimated best)
features are assigned rank 1.
grid_scores_ : array of shape [n_subsets_of_features]
The cross-validation scores such that
``grid_scores_[i]`` corresponds to
the CV score of the i-th subset of features.
estimator_ : object
The external estimator fit on the reduced dataset.
Notes
-----
The size of ``grid_scores_`` is equal to ceil((n_features - 1) / step) + 1,
where step is the number of features removed at each iteration.
Examples
--------
The following example shows how to retrieve the 5 informative features
in the Friedman #1 dataset, where that number is not known a priori.
>>> from sklearn.datasets import make_friedman1
>>> from sklearn.feature_selection import RFECV
>>> from sklearn.svm import SVR
>>> X, y = make_friedman1(n_samples=50, n_features=10, random_state=0)
>>> estimator = SVR(kernel="linear")
>>> selector = RFECV(estimator, step=1, cv=5)
>>> selector = selector.fit(X, y)
>>> selector.support_ # doctest: +NORMALIZE_WHITESPACE
array([ True, True, True, True, True,
False, False, False, False, False], dtype=bool)
>>> selector.ranking_
array([1, 1, 1, 1, 1, 6, 4, 3, 2, 5])
References
----------
.. [1] Guyon, I., Weston, J., Barnhill, S., & Vapnik, V., "Gene selection
for cancer classification using support vector machines",
Mach. Learn., 46(1-3), 389--422, 2002.
"""
def __init__(self, estimator, step=1, cv=None, scoring=None,
estimator_params=None, verbose=0):
self.estimator = estimator
self.step = step
self.cv = cv
self.scoring = scoring
self.estimator_params = estimator_params
self.verbose = verbose
def fit(self, X, y):
"""Fit the RFE model and automatically tune the number of selected
features.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Training vector, where `n_samples` is the number of samples and
`n_features` is the total number of features.
y : array-like, shape = [n_samples]
Target values (integers for classification, real numbers for
regression).
"""
X, y = check_X_y(X, y, "csr")
if self.estimator_params is not None:
warnings.warn("The parameter 'estimator_params' is deprecated as "
"of version 0.16 and will be removed in 0.18. "
"The parameter is no longer necessary because the "
"value is set via the estimator initialisation or "
"set_params method.", DeprecationWarning)
# Initialization
cv = check_cv(self.cv, X, y, is_classifier(self.estimator))
scorer = check_scoring(self.estimator, scoring=self.scoring)
n_features = X.shape[1]
n_features_to_select = 1
# Determine the number of subsets of features
scores = []
# Cross-validation
for n, (train, test) in enumerate(cv):
X_train, y_train = _safe_split(self.estimator, X, y, train)
X_test, y_test = _safe_split(self.estimator, X, y, test, train)
rfe = RFE(estimator=self.estimator,
n_features_to_select=n_features_to_select,
step=self.step, estimator_params=self.estimator_params,
verbose=self.verbose - 1)
rfe._fit(X_train, y_train, lambda estimator, features:
_score(estimator, X_test[:, features], y_test, scorer))
scores.append(np.array(rfe.scores_[::-1]).reshape(1, -1))
scores = np.sum(np.concatenate(scores, 0), 0)
# The index in 'scores' when 'n_features' features are selected
n_feature_index = np.ceil((n_features - n_features_to_select) /
float(self.step))
n_features_to_select = max(n_features_to_select,
n_features - ((n_feature_index -
np.argmax(scores)) *
self.step))
# Re-execute an elimination with best_k over the whole set
rfe = RFE(estimator=self.estimator,
n_features_to_select=n_features_to_select,
step=self.step, estimator_params=self.estimator_params)
rfe.fit(X, y)
# Set final attributes
self.support_ = rfe.support_
self.n_features_ = rfe.n_features_
self.ranking_ = rfe.ranking_
self.estimator_ = clone(self.estimator)
if self.estimator_params:
self.estimator_.set_params(**self.estimator_params)
self.estimator_.fit(self.transform(X), y)
# Fixing a normalization error, n is equal to len(cv) - 1
# here, the scores are normalized by len(cv)
self.grid_scores_ = scores / len(cv)
return self
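# --- Illustrative sketch (not part of the original module) ---
# After fitting, grid_scores_ holds one cross-validated score per feature
# count evaluated, which is useful for picking or visualising the optimal
# number of features. The dataset shape below is arbitrary.
#
#   from sklearn.datasets import make_classification
#   from sklearn.svm import SVC
#
#   X, y = make_classification(n_samples=200, n_features=20, random_state=0)
#   selector = RFECV(SVC(kernel="linear"), step=1, cv=3)
#   selector.fit(X, y)
#   print(selector.n_features_)        # chosen number of features
#   print(selector.grid_scores_)       # CV score per subset size
#   X_reduced = selector.transform(X)  # keep only the selected columns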
| mit |
narry/odenos | src/test/python/org/o3project/odenos/remoteobject/transport/test_local_message_transport.py | 6 | 2258 | # -*- coding:utf-8 -*-
# Copyright 2015 NEC Corporation. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
from org.o3project.odenos.remoteobject.message.request import Request
from org.o3project.odenos.remoteobject.message.response import Response
from org.o3project.odenos.remoteobject.transport.local_message_transport\
import LocalMessageTransport
import unittest
from mock import MagicMock
class RemoteMessageTransportTest(unittest.TestCase):
Dispatcher = MagicMock()
def setUp(self):
self.target = LocalMessageTransport(
"LocalMessageTransport",
self.Dispatcher)
def tearDown(self):
self.target = None
def test_constructor(self):
self.assertEqual(self.target.object_id, "LocalMessageTransport")
self.assertEqual(self.target.dispatcher, self.Dispatcher)
def test_send_request_message(self):
request = Request("object_id", "method", "path")
response = Response(200, "body")
self.target.dispatcher.dispatch_request =\
MagicMock(return_value=response)
result = self.target.send_request_message(request)
self.assertEqual(result.status_code, 200)
self.assertEqual(result.body, "body")
def test_close(self):
self.target.close()
if __name__ == "__main__":
unittest.main()
| apache-2.0 |
takeshineshiro/keystone | keystone/contrib/revoke/backends/kvs.py | 8 | 2338 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from oslo_config import cfg
from oslo_log import versionutils
from oslo_utils import timeutils
from keystone.common import kvs
from keystone.contrib import revoke
from keystone import exception
CONF = cfg.CONF
_EVENT_KEY = 'os-revoke-events'
_KVS_BACKEND = 'openstack.kvs.Memory'
class Revoke(revoke.RevokeDriverV8):
@versionutils.deprecated(
versionutils.deprecated.JUNO,
in_favor_of='keystone.contrib.revoke.backends.sql',
remove_in=+1,
what='keystone.contrib.revoke.backends.kvs')
def __init__(self, **kwargs):
super(Revoke, self).__init__()
self._store = kvs.get_key_value_store('os-revoke-driver')
self._store.configure(backing_store=_KVS_BACKEND, **kwargs)
def _list_events(self):
try:
return self._store.get(_EVENT_KEY)
except exception.NotFound:
return []
def list_events(self, last_fetch=None):
results = []
with self._store.get_lock(_EVENT_KEY):
events = self._list_events()
for event in events:
revoked_at = event.revoked_at
if last_fetch is None or revoked_at > last_fetch:
results.append(event)
return results
def revoke(self, event):
pruned = []
expire_delta = datetime.timedelta(seconds=CONF.token.expiration)
oldest = timeutils.utcnow() - expire_delta
with self._store.get_lock(_EVENT_KEY) as lock:
events = self._list_events()
if event:
events.append(event)
for event in events:
revoked_at = event.revoked_at
if revoked_at > oldest:
pruned.append(event)
self._store.set(_EVENT_KEY, pruned, lock)
| apache-2.0 |
mulkieran/storage_alerts | tests/sources/generic/by_line/states_test.py | 1 | 1346 | # Copyright (C) 2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Anne Mulhern <[email protected]>
""" Test for managers. """
import unittest
from storage_alerts.sources.generic.by_line.states import RecognizerStates
class StatesTestCase(unittest.TestCase):
""" Exercise states methods. """
def testStr(self):
""" Exercise a state's __str__ method. """
self.assertIsNotNone(str(RecognizerStates.MAYBE_NO))
| gpl-2.0 |
hn8841182/W11 | static/Brython3.1.1-20150328-091302/Lib/xml/etree/ElementInclude.py | 784 | 5146 | #
# ElementTree
# $Id: ElementInclude.py 3375 2008-02-13 08:05:08Z fredrik $
#
# limited xinclude support for element trees
#
# history:
# 2003-08-15 fl created
# 2003-11-14 fl fixed default loader
#
# Copyright (c) 2003-2004 by Fredrik Lundh. All rights reserved.
#
# [email protected]
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2008 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/psf/license for licensing details.
##
# Limited XInclude support for the ElementTree package.
##
import copy
from . import ElementTree
XINCLUDE = "{http://www.w3.org/2001/XInclude}"
XINCLUDE_INCLUDE = XINCLUDE + "include"
XINCLUDE_FALLBACK = XINCLUDE + "fallback"
##
# Fatal include error.
class FatalIncludeError(SyntaxError):
pass
##
# Default loader. This loader reads an included resource from disk.
#
# @param href Resource reference.
# @param parse Parse mode. Either "xml" or "text".
# @param encoding Optional text encoding (UTF-8 by default for "text").
# @return The expanded resource. If the parse mode is "xml", this
# is an ElementTree instance. If the parse mode is "text", this
# is a Unicode string. If the loader fails, it can return None
# or raise an IOError exception.
# @throws IOError If the loader fails to load the resource.
def default_loader(href, parse, encoding=None):
if parse == "xml":
file = open(href, 'rb')
data = ElementTree.parse(file).getroot()
else:
if not encoding:
encoding = 'UTF-8'
file = open(href, 'r', encoding=encoding)
data = file.read()
file.close()
return data
##
# Expand XInclude directives.
#
# @param elem Root element.
# @param loader Optional resource loader. If omitted, it defaults
# to {@link default_loader}. If given, it should be a callable
# that implements the same interface as <b>default_loader</b>.
# @throws FatalIncludeError If the function fails to include a given
# resource, or if the tree contains malformed XInclude elements.
# @throws IOError If the function fails to load a given resource.
def include(elem, loader=None):
if loader is None:
loader = default_loader
# look for xinclude elements
i = 0
while i < len(elem):
e = elem[i]
if e.tag == XINCLUDE_INCLUDE:
# process xinclude directive
href = e.get("href")
parse = e.get("parse", "xml")
if parse == "xml":
node = loader(href, parse)
if node is None:
raise FatalIncludeError(
"cannot load %r as %r" % (href, parse)
)
node = copy.copy(node)
if e.tail:
node.tail = (node.tail or "") + e.tail
elem[i] = node
elif parse == "text":
text = loader(href, parse, e.get("encoding"))
if text is None:
raise FatalIncludeError(
"cannot load %r as %r" % (href, parse)
)
if i:
node = elem[i-1]
node.tail = (node.tail or "") + text + (e.tail or "")
else:
elem.text = (elem.text or "") + text + (e.tail or "")
del elem[i]
continue
else:
raise FatalIncludeError(
"unknown parse type in xi:include tag (%r)" % parse
)
elif e.tag == XINCLUDE_FALLBACK:
raise FatalIncludeError(
"xi:fallback tag must be child of xi:include (%r)" % e.tag
)
else:
include(e, loader)
i = i + 1
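# --- Illustrative usage sketch (not part of this module's original source) ---
# Typical use: parse a document that contains xi:include elements, then
# expand them in place. 'document.xml' is a hypothetical file name.
#
#   from xml.etree import ElementTree, ElementInclude
#   tree = ElementTree.parse('document.xml')
#   ElementInclude.include(tree.getroot())
#   ElementTree.dump(tree.getroot())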
| gpl-3.0 |
alacritythief/django-allauth | allauth/socialaccount/south_migrations/0011_auto__chg_field_socialtoken_token.py | 77 | 6468 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'SocialToken.token'
db.alter_column('socialaccount_socialtoken', 'token', self.gf('django.db.models.fields.TextField')())
def backwards(self, orm):
# Changing field 'SocialToken.token'
db.alter_column('socialaccount_socialtoken', 'token', self.gf('django.db.models.fields.CharField')(max_length=255))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'socialaccount.socialaccount': {
'Meta': {'unique_together': "(('provider', 'uid'),)", 'object_name': 'SocialAccount'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'extra_data': ('allauth.socialaccount.fields.JSONField', [], {'default': "'{}'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'socialaccount.socialapp': {
'Meta': {'object_name': 'SocialApp'},
'client_id': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sites.Site']", 'symmetrical': 'False', 'blank': 'True'})
},
'socialaccount.socialtoken': {
'Meta': {'unique_together': "(('app', 'account'),)", 'object_name': 'SocialToken'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['socialaccount.SocialAccount']"}),
'app': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['socialaccount.SocialApp']"}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.TextField', [], {}),
'token_secret': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['socialaccount'] | mit |
rogerwang/chromium | tools/python/google/path_utils.py | 191 | 2910 | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Some utility methods for getting and manipulating paths."""
# TODO(pamg): Have the buildbot use these, too.
import errno
import os
import sys
class PathNotFound(Exception): pass
def ScriptDir():
"""Get the full path to the directory containing the current script."""
script_filename = os.path.abspath(sys.argv[0])
return os.path.dirname(script_filename)
def FindAncestor(start_dir, ancestor):
"""Finds an ancestor dir in a path.
For example, FindAncestor('c:\foo\bar\baz', 'bar') would return
'c:\foo\bar'. Unlike FindUpward*, this only looks at direct path ancestors.
"""
start_dir = os.path.abspath(start_dir)
path = start_dir
while True:
(parent, tail) = os.path.split(path)
if tail == ancestor:
return path
if not tail:
break
path = parent
raise PathNotFound("Unable to find ancestor %s in %s" % (ancestor, start_dir))
def FindUpwardParent(start_dir, *desired_list):
"""Finds the desired object's parent, searching upward from the start_dir.
Searches start_dir and all its parents looking for the desired directory
or file, which may be given in one or more path components. Returns the
first directory in which the top desired path component was found, or raises
PathNotFound if it wasn't.
"""
desired_path = os.path.join(*desired_list)
last_dir = ''
cur_dir = start_dir
found_path = os.path.join(cur_dir, desired_path)
while not os.path.exists(found_path):
last_dir = cur_dir
cur_dir = os.path.dirname(cur_dir)
if last_dir == cur_dir:
raise PathNotFound('Unable to find %s above %s' %
(desired_path, start_dir))
found_path = os.path.join(cur_dir, desired_path)
# Strip the entire original desired path from the end of the one found
# and remove a trailing path separator, if present.
found_path = found_path[:len(found_path) - len(desired_path)]
if found_path.endswith(os.sep):
found_path = found_path[:len(found_path) - 1]
return found_path
def FindUpward(start_dir, *desired_list):
"""Returns a path to the desired directory or file, searching upward.
Searches start_dir and all its parents looking for the desired directory
or file, which may be given in one or more path components. Returns the full
path to the desired object, or raises PathNotFound if it wasn't found.
"""
parent = FindUpwardParent(start_dir, *desired_list)
return os.path.join(parent, *desired_list)
def MaybeMakeDirectory(*path):
"""Creates an entire path, if it doesn't already exist."""
file_path = os.path.join(*path)
try:
os.makedirs(file_path)
except OSError, e:
# errno.EEXIST is "File exists". If we see another error, re-raise.
if e.errno != errno.EEXIST:
raise
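# --- Illustrative usage sketch (not part of the original module) ---
# A small self-test of the upward-search helpers, run only when the module
# is executed directly. The 'tools' directory name is an assumption about
# the checkout layout and may not exist in every tree.
if __name__ == '__main__':
  here = ScriptDir()
  print 'script dir: %s' % here
  try:
    print 'nearest "tools" ancestor: %s' % FindAncestor(here, 'tools')
  except PathNotFound, e:
    print e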
| bsd-3-clause |
bemuzie/pyzotero | test/test_zotero.py | 2 | 17470 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Tests for the Pyzotero module
Copyright Stephan Hügel, 2011
This file is part of Pyzotero.
Pyzotero is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Pyzotero is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Pyzotero. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import unittest
import httpretty
from httpretty import HTTPretty
from pyzotero.pyzotero import zotero as z
from dateutil import parser
# Python 3 compatibility faffing
try:
from urllib import urlencode
from urllib import quote
from urlparse import urlparse
from urlparse import parse_qs
except ImportError:
from urllib.parse import urlencode
from urllib.parse import urlparse
from urllib.parse import parse_qs
from urllib.parse import quote
class ZoteroTests(unittest.TestCase):
""" Tests for pyzotero
"""
cwd = os.path.dirname(os.path.realpath(__file__))
def get_doc(self, doc_name, cwd=cwd):
""" return the requested test document """
with open(os.path.join(cwd, 'api_responses', '%s' % doc_name), 'r') as f:
return f.read()
def setUp(self):
""" Set stuff up
"""
self.item_doc = self.get_doc('item_doc.json')
self.items_doc = self.get_doc('items_doc.json')
self.collections_doc = self.get_doc('collections_doc.json')
self.collection_doc = self.get_doc('collection_doc.json')
self.citation_doc = self.get_doc('citation_doc.xml')
# self.biblio_doc = self.get_doc('bib_doc.xml')
self.attachments_doc = self.get_doc('attachments_doc.json')
self.tags_doc = self.get_doc('tags_doc.json')
self.groups_doc = self.get_doc('groups_doc.json')
self.item_templt = self.get_doc('item_template.json')
self.item_types = self.get_doc('item_types.json')
self.keys_response = self.get_doc('keys_doc.txt')
self.creation_doc = self.get_doc('creation_doc.json')
self.item_file = self.get_doc('item_file.pdf')
# Register the default items response for the user items endpoint
HTTPretty.enable()
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserID/items',
content_type='application/json',
body=self.items_doc)
@httpretty.activate
def testFailWithoutCredentials(self):
""" Instance creation should fail, because we're leaving out a
credential
"""
with self.assertRaises(z.ze.MissingCredentials):
_ = z.Zotero('myuserID')
@httpretty.activate
def testRequestBuilder(self):
""" Should url-encode all added parameters
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
zot.add_parameters(limit=0, start=7)
self.assertEqual(
parse_qs('start=7&limit=0&format=json'),
parse_qs(zot.url_params))
# @httpretty.activate
# def testBuildQuery(self):
# """ Check that spaces etc. are being correctly URL-encoded and added
# to the URL parameters
# """
# orig = 'https://api.zotero.org/users/myuserID/tags/hi%20there/items?start=10&format=json'
# zot = z.Zotero('myuserID', 'user', 'myuserkey')
# zot.add_parameters(start=10)
# query_string = '/users/{u}/tags/hi there/items'
# query = zot._build_query(query_string)
# self.assertEqual(
# sorted(parse_qs(orig).items()),
# sorted(parse_qs(query).items()))
@httpretty.activate
def testParseItemJSONDoc(self):
""" Should successfully return a list of item dicts, key should match
input doc's zapi:key value, and author should have been correctly
parsed out of the XHTML payload
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserID/items',
content_type='application/json',
body=self.item_doc)
items_data = zot.items()
self.assertEqual(u'X42A7DEE', items_data['data']['key'])
self.assertEqual(u'Institute of Physics (Great Britain)', items_data['data']['creators'][0]['name'])
self.assertEqual(u'book', items_data['data']['itemType'])
test_dt = parser.parse("2011-01-13T03:37:29Z")
incoming_dt = parser.parse(items_data['data']['dateModified'])
self.assertEqual(test_dt, incoming_dt)
@httpretty.activate
def testGetItemFile(self):
"""
Should successfully return a binary string with a PDF content
"""
zot = z.Zotero('myuserid', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserid/items/MYITEMID/file',
content_type='application/pdf',
body=self.item_file)
items_data = zot.file('myitemid')
self.assertEqual(b'One very strange PDF\n', items_data)
@httpretty.activate
def testParseAttachmentsJSONDoc(self):
""" Ensure that attachments are being correctly parsed """
zot = z.Zotero('myuserid', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserid/items',
content_type='application/json',
body=self.attachments_doc)
attachments_data = zot.items()
self.assertEqual(u'1641 Depositions', attachments_data['data']['title'])
@httpretty.activate
def testParseKeysResponse(self):
""" Check that parsing plain keys returned by format = keys works """
zot = z.Zotero('myuserid', 'user', 'myuserkey')
zot.url_params = 'format=keys'
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserid/items?format=keys',
content_type='text/plain',
body=self.keys_response)
response = zot.items()
self.assertEqual(u'JIFWQ4AN', response[:8])
@httpretty.activate
def testParseChildItems(self):
""" Try and parse child items """
zot = z.Zotero('myuserid', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserid/items/ABC123/children',
content_type='application/json',
body=self.items_doc)
items_data = zot.children('ABC123')
self.assertEqual(u'NM66T6EF', items_data[0]['key'])
@httpretty.activate
def testCitUTF8(self):
""" ensure that unicode citations are correctly processed by Pyzotero
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
url = 'https://api.zotero.org/users/myuserID/items/GW8V2CK7'
HTTPretty.register_uri(
HTTPretty.GET,
url,
content_type='application/atom+xml',
body=self.citation_doc)
cit = zot.item('GW8V2CK7', content='citation', style='chicago-author-date')
self.assertEqual(
cit[0],
u'<span>(Ans\\xe6lm and Tka\\u010dik 2014)</span>')
# @httpretty.activate
# def testParseItemAtomBibDoc(self):
# """ Should match a DIV with class = csl-entry
# """
# zot = z.Zotero('myuserID', 'user', 'myuserkey')
# zot.url_params = 'content=bib'
# HTTPretty.register_uri(
# HTTPretty.GET,
# 'https://api.zotero.org/users/myuserID/items?content=bib&format=atom',
# content_type='application/atom+xml',
# body=self.biblio_doc)
# items_data = zot.items()
# self.assertEqual(
# items_data[0],
# u'<div class="csl-entry">Robert A. Caro. \u201cThe Power Broker\u202f: Robert Moses and the Fall of New York,\u201d 1974.</div>'
# )
@httpretty.activate
def testParseCollectionJSONDoc(self):
""" Should successfully return a single collection dict,
'name' key value should match input doc's name value
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserID/collections/KIMI8BSG',
content_type='application/json',
body=self.collection_doc)
collections_data = zot.collection('KIMI8BSG')
self.assertEqual(
"LoC",
collections_data['data']['name'])
@httpretty.activate
def testParseCollectionsJSONDoc(self):
""" Should successfully return a list of collection dicts, key should
match input doc's zapi:key value, and 'title' value should match
input doc's title value
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserID/collections',
content_type='application/json',
body=self.collections_doc)
collections_data = zot.collections()
self.assertEqual(
"LoC",
collections_data[0]['data']['name'])
@httpretty.activate
def testParseTagsJSON(self):
""" Should successfully return a list of tags
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserID/tags?limit=1',
content_type='application/json',
body=self.tags_doc)
tags_data = zot.tags()
self.assertEqual(u'Community / Economic Development', tags_data[0])
@httpretty.activate
def testParseGroupsJSONDoc(self):
""" Should successfully return a list of group dicts, ID should match
input doc's zapi:key value, and 'total_items' value should match
input doc's zapi:numItems value
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserID/groups',
content_type='application/json',
body=self.groups_doc)
groups_data = zot.groups()
self.assertEqual('smart_cities', groups_data[0]['data']['name'])
def testParamsReset(self):
""" Should successfully reset URL parameters after a query string
is built
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
zot.add_parameters(start=5, limit=10)
zot._build_query('/whatever')
zot.add_parameters(start=2)
self.assertEqual(
parse_qs('start=2&format=json'),
parse_qs(zot.url_params))
@httpretty.activate
def testParamsBlankAfterCall(self):
""" self.url_params should be blank after an API call
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserID/items',
content_type='application/json',
body=self.items_doc)
_ = zot.items()
self.assertEqual(None, zot.url_params)
@httpretty.activate
def testResponseForbidden(self):
""" Ensure that an error is properly raised for 403
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserID/items',
content_type='application/json',
body=self.items_doc,
status=403)
with self.assertRaises(z.ze.UserNotAuthorised):
zot.items()
@httpretty.activate
def testResponseUnsupported(self):
""" Ensure that an error is properly raised for 400
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserID/items',
content_type='application/json',
body=self.items_doc,
status=400)
with self.assertRaises(z.ze.UnsupportedParams):
zot.items()
@httpretty.activate
def testResponseNotFound(self):
""" Ensure that an error is properly raised for 404
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserID/items',
body=self.items_doc,
content_type='application/json',
status=404)
with self.assertRaises(z.ze.ResourceNotFound):
zot.items()
@httpretty.activate
def testResponseMiscError(self):
""" Ensure that an error is properly raised for unspecified errors
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/users/myuserID/items',
content_type='application/json',
body=self.items_doc,
status=500)
with self.assertRaises(z.ze.HTTPError):
zot.items()
@httpretty.activate
def testGetItems(self):
""" Ensure that we can retrieve a list of all items """
zot = z.Zotero('myuserID', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/itemTypes',
body=self.item_types)
t = zot.item_types()
self.assertEqual(t[0]['itemType'], 'artwork')
self.assertEqual(t[-1]['itemType'], 'webpage')
@httpretty.activate
def testGetTemplate(self):
""" Ensure that item templates are retrieved and converted into dicts
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/items/new?itemType=book',
content_type='application/json',
body=self.item_templt)
t = zot.item_template('book')
self.assertEqual('book', t['itemType'])
def testCreateCollectionError(self):
""" Ensure that collection creation fails with the wrong dict
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
t = [{'foo': 'bar'}]
with self.assertRaises(z.ze.ParamNotPassed):
t = zot.create_collection(t)
# @httpretty.activate
# def testUpdateItem(self):
# """ Test that we can update an item
# This test is a kludge; it only tests that the mechanism for
# internal key removal is OK, and that we haven't made any silly
# list/dict comprehension or genexpr errors
# """
# import json
# # first, retrieve an item
# zot = z.Zotero('myuserID', 'user', 'myuserkey')
# HTTPretty.register_uri(
# HTTPretty.GET,
# 'https://api.zotero.org/users/myuserID/items',
# body=self.items_doc)
# items_data = zot.items()
# items_data['title'] = 'flibble'
# json.dumps(*zot._cleanup(items_data))
@httpretty.activate
def testItemCreation(self):
""" Tests creation of a new item using a template
"""
zot = z.Zotero('myuserID', 'user', 'myuserkey')
HTTPretty.register_uri(
HTTPretty.GET,
'https://api.zotero.org/items/new?itemType=book',
body=self.item_templt,
content_type='application/json')
template = zot.item_template('book')
httpretty.reset()
HTTPretty.register_uri(
HTTPretty.POST,
'https://api.zotero.org/users/myuserID/items',
body=self.creation_doc,
content_type='application/json',
status=200)
# now let's test something
resp = zot.create_items([template])
self.assertEqual('ABC123', resp['success']['0'])
def testTooManyItems(self):
""" Should fail because we're passing too many items
"""
itms = [i for i in range(51)]
zot = z.Zotero('myuserID', 'user', 'myuserkey')
with self.assertRaises(z.ze.TooManyItems):
zot.create_items(itms)
# @httprettified
# def testRateLimit(self):
# """ Test 429 response handling (e.g. wait, wait a bit longer etc.)
# """
# zot = z.Zotero('myuserID', 'user', 'myuserkey')
# HTTPretty.register_uri(
# HTTPretty.GET,
# 'https://api.zotero.org/users/myuserID/items',
# responses=[
# HTTPretty.Response(body=self.items_doc, status=429),
# HTTPretty.Response(body=self.items_doc, status=429),
# HTTPretty.Response(body=self.items_doc, status=200)])
# zot.items()
# with self.assertEqual(z.backoff.delay, 8):
# zot.items()
def tearDown(self):
""" Tear stuff down
"""
HTTPretty.disable()
if __name__ == "__main__":
unittest.main()
| gpl-3.0 |
annapowellsmith/openpresc | openprescribing/frontend/management/commands/import_practices.py | 1 | 3793 | import csv
import glob
from django.core.management.base import BaseCommand
from frontend.models import Practice, PCT
class Command(BaseCommand):
args = ''
help = 'Imports practice data either from epraccur.csv, or from HSCIC '
help += 'address files, depending on options. '
def add_arguments(self, parser):
parser.add_argument('--hscic_address')
parser.add_argument('--epraccur')
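    # Example invocations (file paths below are hypothetical, for illustration only):
    #   ./manage.py import_practices --epraccur data/epraccur.csv
    #   ./manage.py import_practices --hscic_address data/raw_data/T1234ADDR.csv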
def handle(self, *args, **options):
self.IS_VERBOSE = False
if options['verbosity'] > 1:
self.IS_VERBOSE = True
if options['epraccur']:
self.import_practices_from_epraccur(options['epraccur'])
else:
practice_files = []
if options['hscic_address']:
practice_files = [options['hscic_address']]
else:
practice_files = glob.glob('./data/raw_data/T*ADDR*')
for f in practice_files:
self.import_practices_from_hscic(f)
def parse_date(self, d):
return '-'.join([d[:4], d[4:6], d[6:]])
def _strip_dict(self, row):
'''
Strip whitespace from keys and values in dictionary.
'''
        for k in list(row.keys()):
if row[k]:
row[k] = row[k].strip()
row[k.strip()] = row.pop(k)
return row
def import_practices_from_epraccur(self, filename):
entries = csv.reader(open(filename, 'rU'))
count = 0
for row in entries:
row = [r.strip() for r in row]
practice, created = Practice.objects.get_or_create(
code=row[0]
)
practice.name = row[1]
practice.address1 = row[4]
practice.address2 = row[5]
practice.address3 = row[6]
practice.address4 = row[7]
practice.address5 = row[8]
practice.postcode = row[9]
practice.open_date = self.parse_date(row[10])
if row[11]:
practice.close_date = self.parse_date(row[11])
practice.status_code = row[12]
if not practice.ccg_change_reason:
try:
# Not all practices have a CCG - the ones that don't are
# mostly in Jersey, Isle of Man, etc.
pco_code = row[23].strip()
ccg = PCT.objects.get(code=pco_code)
practice.ccg = ccg
except PCT.DoesNotExist:
if self.IS_VERBOSE:
print 'ccg not found with code', pco_code
if row[15]:
practice.join_provider_date = self.parse_date(row[15])
if row[16]:
practice.leave_provider_date = self.parse_date(row[16])
practice.setting = row[-2]
practice.save()
if created:
count += 1
if self.IS_VERBOSE:
print '%s Practice objects created from epraccur' % count
def import_practices_from_hscic(self, filename):
if self.IS_VERBOSE:
print 'Importing practices from %s' % filename
count = 0
practices = csv.reader(open(filename, 'rU'))
for row in practices:
row = [i.strip() for i in row]
p, created = Practice.objects.get_or_create(
code=row[1]
)
if created:
p.name = row[2]
p.address1 = row[3]
p.address2 = row[4]
p.address3 = row[5]
p.address4 = row[6]
p.postcode = row[7]
p.save()
if created:
count += 1
if self.IS_VERBOSE:
print '%s Practice objects created from HSCIC' % count
| mit |
saurabhjn76/sympy | sympy/stats/rv_interface.py | 88 | 5205 | from __future__ import print_function, division
from .rv import (probability, expectation, density, where, given, pspace, cdf,
sample, sample_iter, random_symbols, independent, dependent,
sampling_density)
from sympy import sqrt
__all__ = ['P', 'E', 'density', 'where', 'given', 'sample', 'cdf', 'pspace',
'sample_iter', 'variance', 'std', 'skewness', 'covariance',
'dependent', 'independent', 'random_symbols', 'correlation',
'moment', 'cmoment', 'sampling_density']
def moment(X, n, c=0, condition=None, **kwargs):
"""
Return the nth moment of a random expression about c i.e. E((X-c)**n)
Default value of c is 0.
Examples
========
>>> from sympy.stats import Die, moment, E
>>> X = Die('X', 6)
>>> moment(X, 1, 6)
-5/2
>>> moment(X, 2)
91/6
>>> moment(X, 1) == E(X)
True
"""
return expectation((X - c)**n, condition, **kwargs)
def variance(X, condition=None, **kwargs):
"""
Variance of a random expression
Expectation of (X-E(X))**2
Examples
========
>>> from sympy.stats import Die, E, Bernoulli, variance
>>> from sympy import simplify, Symbol
>>> X = Die('X', 6)
>>> p = Symbol('p')
>>> B = Bernoulli('B', p, 1, 0)
>>> variance(2*X)
35/3
>>> simplify(variance(B))
p*(-p + 1)
"""
return cmoment(X, 2, condition, **kwargs)
def standard_deviation(X, condition=None, **kwargs):
"""
Standard Deviation of a random expression
Square root of the Expectation of (X-E(X))**2
Examples
========
>>> from sympy.stats import Bernoulli, std
>>> from sympy import Symbol, simplify
>>> p = Symbol('p')
>>> B = Bernoulli('B', p, 1, 0)
>>> simplify(std(B))
sqrt(p*(-p + 1))
"""
return sqrt(variance(X, condition, **kwargs))
std = standard_deviation
def covariance(X, Y, condition=None, **kwargs):
"""
Covariance of two random expressions
The expectation that the two variables will rise and fall together
Covariance(X,Y) = E( (X-E(X)) * (Y-E(Y)) )
Examples
========
>>> from sympy.stats import Exponential, covariance
>>> from sympy import Symbol
>>> rate = Symbol('lambda', positive=True, real=True, finite=True)
>>> X = Exponential('X', rate)
>>> Y = Exponential('Y', rate)
>>> covariance(X, X)
lambda**(-2)
>>> covariance(X, Y)
0
>>> covariance(X, Y + rate*X)
1/lambda
"""
return expectation(
(X - expectation(X, condition, **kwargs)) *
(Y - expectation(Y, condition, **kwargs)),
condition, **kwargs)
def correlation(X, Y, condition=None, **kwargs):
"""
Correlation of two random expressions, also known as correlation
coefficient or Pearson's correlation
The normalized expectation that the two variables will rise
and fall together
Correlation(X,Y) = E( (X-E(X)) * (Y-E(Y)) / (sigma(X) * sigma(Y)) )
Examples
========
>>> from sympy.stats import Exponential, correlation
>>> from sympy import Symbol
>>> rate = Symbol('lambda', positive=True, real=True, finite=True)
>>> X = Exponential('X', rate)
>>> Y = Exponential('Y', rate)
>>> correlation(X, X)
1
>>> correlation(X, Y)
0
>>> correlation(X, Y + rate*X)
1/sqrt(1 + lambda**(-2))
"""
return covariance(X, Y, condition, **kwargs)/(std(X, condition, **kwargs)
* std(Y, condition, **kwargs))
def cmoment(X, n, condition=None, **kwargs):
"""
Return the nth central moment of a random expression about its mean
i.e. E((X - E(X))**n)
Examples
========
>>> from sympy.stats import Die, cmoment, variance
>>> X = Die('X', 6)
>>> cmoment(X, 3)
0
>>> cmoment(X, 2)
35/12
>>> cmoment(X, 2) == variance(X)
True
"""
mu = expectation(X, condition, **kwargs)
return moment(X, n, mu, condition, **kwargs)
def smoment(X, n, condition=None, **kwargs):
"""
Return the nth Standardized moment of a random expression i.e.
E( ((X - mu)/sigma(X))**n )
Examples
========
>>> from sympy.stats import skewness, Exponential, smoment
>>> from sympy import Symbol
>>> rate = Symbol('lambda', positive=True, real=True, finite=True)
>>> Y = Exponential('Y', rate)
>>> smoment(Y, 4)
9
>>> smoment(Y, 4) == smoment(3*Y, 4)
True
>>> smoment(Y, 3) == skewness(Y)
True
"""
sigma = std(X, condition, **kwargs)
return (1/sigma)**n*cmoment(X, n, condition, **kwargs)
def skewness(X, condition=None, **kwargs):
"""
Measure of the asymmetry of the probability distribution
Positive skew indicates that most of the values lie to the right of
the mean
skewness(X) = E( ((X - E(X))/sigma)**3 )
Examples
========
>>> from sympy.stats import skewness, Exponential, Normal
>>> from sympy import Symbol
>>> X = Normal('X', 0, 1)
>>> skewness(X)
0
>>> rate = Symbol('lambda', positive=True, real=True, finite=True)
>>> Y = Exponential('Y', rate)
>>> skewness(Y)
2
"""
return smoment(X, 3, condition, **kwargs)
P = probability
E = expectation
| bsd-3-clause |
AlexBryner/SalesforceTools | SalesforceScripts.py | 1 | 12737 | # coding: utf-8
import numpy as np
import pandas as pd
import time
from datetime import datetime, timedelta, date
from time import sleep, gmtime, strftime
from pandas import DataFrame, Series, read_csv
from salesforce_bulk_api import SalesforceBulkJob
from SalesforceBulkQuery import *
from simple_salesforce import *
###################################################################################################
# Salesforce Credentials
# Creates SimpleSalesforce Login Instance
sf = Salesforce(username='', password='', security_token='', sandbox=False, client_id='')  # fill in credentials; sandbox=False is a placeholder so the call is syntactically valid
###################################################################################################
def getBlankDF():
return pd.DataFrame(np.nan, index=[], columns=[])
def NameCaseAsTitles(x):
if (str(x).isupper() or str(x).islower()) and '@' not in str(x):
return str(x).title()
else:
return x
def getDate(days):
return(datetime.today() - timedelta(days=days)).strftime('%Y-%m-%dT00:00:00z') # YYYY-MM-DDThh:mm:ssz
def SFNulls(df, FillWith='#N/A'):
"""
Description: Fills 0's and NAN's with "#N/A" which is the value that the Salesforce Bulk API recognizes as Null.
Parameters:
df = Pandas.DataFrame
Recognizes 'float64', 'int64', and 'int32' data types.
"""
df.apply(lambda s: pd.to_numeric(s, errors='ignore'))
NumCol = df.columns.values.tolist()
for col in NumCol:
df[col] = df[col].replace(0, np.NAN).fillna('%s' % FillWith)
def SFQuery(SOQL: str, InList=None, LowerHeaders=True, CheckParentChild=True, KeepAttributes=False):
"""
Description: Queries Salesforce returning all results in a pandas dataframe. This also sets all possible data types to numbers and sets column headers to lower case. If using InList, this functionality is built with pandas dataframe columns in mind to help simplify filtering from other SOQL results.
Parameters:
SOQL = Salesforce SOQL Statement
InList* = List of items for an "IN" filter. Apex SOQL - "SELECT Id, Name FROM Account Where Id IN :ids"
SOQL parameter must be written out to the point where the : would be set in a SOQL query in Apex.
EX: SFQuery("SELECT Id, Name From Contact WHERE FirstName = 'Alex' and Id IN", IdsList)
InList format - ['id1', 'id2', 'id2', 'id3', 'id3', 'id4', 'id5'] becomes ('id1', 'id2', 'id3', 'id4', 'id5')
I usually use this with a dataframe column.
ex: "SFQuery("Select Id, Name From Contact Where Id In", InList=list(your_dataframe['column_name']))
LowerHeader = Returns Dataframe with column headers lowercase, defaulted true for previous projects
        CheckParentChild = This checks for relationships by looking for the ordered dictionaries returned by Salesforce. It loops through to make sure it has reached the end of the chain when stepping through multiple parent relationships. Turn off if queries need to run slightly faster.
        InList* - This is not an efficient use of api calls. There are limitations to the length of the queries so this is capped out at a default of 300 elements. Nested Select statements in the where clause are a more efficient use of api calls but there are always tradeoffs. At some point it would make more sense to utilize tuples, but unfortunately salesforce did not like the format with the last comma.
"""
def basicSOQL(SOQLstr : str):
# formats the Salesforce ordered dictionary into a pandas dataframe
try:
od = sf.query_all("%s" % SOQLstr)
items = {val: dict(od['records'][val]) for val in range(len(od['records'])) }
res = DataFrame.from_dict(items, orient='index')
if LowerHeaders == True:
res.columns = map(str.lower, res.columns)
return res.apply(lambda s: pd.to_numeric(s, errors='ignore'))
except ValueError:
pass
def CreateFilterStr(ListToStr):
# creates a string from a list
# ['id1', 'id2', 'id3', 'id4', 'id5'] -> ('id1', 'id2', 'id3', 'id4', 'id5')
resStr = "("
r = 0
for rl in ListToStr:
if rl is not None:
if r == 0:
resStr += "'"+str(rl)+"'"
r = 1
elif r == 1:
resStr += ",'"+str(rl)+"'"
resStr += ")"
return resStr
def BatchQueryList(toBatchList):
        # removes duplicates from the list, then batches it into groups
        # [('id1', 'id2', 'id3', 'id4', 'id5'), ('id6', 'id7', 'id8', 'id9', 'id10')]
batchSize = 300
newList = list(set(toBatchList))
listSize = len(newList)
startPoint = 0
endPoint = batchSize
res = []
while startPoint < listSize:
tempStr = CreateFilterStr(newList[startPoint:endPoint])
res.append([tempStr])
startPoint = endPoint
endPoint += batchSize
return res
def InListQuery(SOQL, InList):
# runs a query for each list from the batched lists and stacks the results
filterLists = BatchQueryList(InList)
resDF = None
i = 0
for i in range(0,len(filterLists)):
tempDF = basicSOQL(SOQLstr = "%s %s" % (SOQL, filterLists[i][0]))
try: resDF = resDF.append(tempDF, ignore_index=True)
except AttributeError: resDF = tempDF
i += 1
return resDF
def getChildRecords(obj, row):
if row == None:
return None
size = row.get('totalSize')
records = row.get('records')
tempDic = {}
for i in range(0,size):
tempDic[i] = {}
for field in records[i].keys():
try:
records[i].get(field).keys()
continue
except AttributeError:
pass
tempDic[i][obj + '.' + field] = records[i].get(field)
return tempDic
def getParentRecords(field, row):
if row == None:
return None
else:
return row.get(field)
rs = None
if InList == None:
rs = basicSOQL(SOQL)
else:
InList = list(InList)
rs = InListQuery(SOQL, InList)
# Drops the attributes column passed through by Salesforce
if CheckParentChild == False and KeepAttributes == False:
rs = rs.drop(['attributes'], axis=1)
while CheckParentChild:
CheckParentChild = False
indexCols = []
for col in rs:
obj = None
relationship = None
for i in range(len(rs[col])):
# scans down each column until finding an ordered dict to parse
if rs[col][i] == None:
continue
try:
if rs[col][i].get('type') != None and col == 'attributes':
if KeepAttributes == False:
rs = rs.drop([col], axis=1)
break
except AttributeError:
indexCols.append(col) # will use this later for creating a multi indexed dataframe
break
# Determines whether parent or child query and the object type
try:
obj = rs[col][i].get('attributes').get('type')
relationship = 'Parent'
except:
pass
try:
obj = rs[col][i].get('records')[0].get('attributes').get('type')
relationship = 'Child'
except:
pass
break
if relationship == 'Child' and obj != None:
rs[col] = rs.apply(lambda row: getChildRecords(obj, row[col]), axis=1)
elif relationship == 'Parent' and obj != None:
fields = []
for i in range(len(rs[col])):
if rs[col][i] != None:
fields.extend(list(rs[col][i].keys()))
fields = list(set(fields))
if KeepAttributes == False:
try:
fields.remove('attributes')
except ValueError:
pass
for field in fields:
rs[obj + '.' + field] = rs.apply(lambda row: getParentRecords(field, row[col]), axis=1)
rs = rs.drop([col], axis=1)
CheckParentChild = True
# next I'd like to setup an option for child relationship queries to return a multi indexed dataframe
# print(indexCols)
return rs
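# A minimal sketch of how SFQuery and its InList batching are meant to be used.
# The object names, fields and date filter below are illustrative assumptions,
# not values taken from this repository.
def _example_sfquery_usage():
    accounts = SFQuery("SELECT Id, Name FROM Account WHERE CreatedDate > %s" % getDate(30))
    contacts = SFQuery("SELECT Id, AccountId, Email FROM Contact WHERE AccountId IN",
                       InList=list(accounts['id']))
    return accounts, contacts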
def SFFormat(df, SObject, EnforceNulls=False):
"""
Description: Looks up data types and dynamically formats columns to a correct format for the Bulk Api. Returns error messages for invalid data types or column headers. If EnforceNulls is true fills all blanks with #N/A, if false will set blanks to ''.
Parameters:
df = Pandas.DataFrame
SObject = Type of object for the upload. Ex: 'Account'
EnforceNulls = If true will fill blanks with #N/A to set as null in Salesforce
*Currently only formats dates and datetimes
"""
NoFieldError = ''
InvalidDataError = ''
df.columns = map(str.lower, df.columns)
fieldDict = getattr(sf, '%s' % SObject).describe()["fields"]
numFields = len(fieldDict)
NumCol = df.columns.values.tolist()
for col in NumCol:
i = 0
for x in fieldDict:
if x['name'].lower() == col:
dtype = x['type']
length = x['length']
try:
if dtype == 'date':
df[col] = pd.to_datetime(df[col]).dt.strftime('%Y-%m-%d').replace(to_replace='NaT', value='#N/A')
elif dtype == 'datetime':
df[col] = pd.to_datetime(df[col]).dt.strftime('%Y-%m-%dT%H:%M:%S').replace(to_replace='NaT', value='#N/A')
except ValueError:
InvalidDataError += ("Invalid "+dtype+" : "+col+"\n")
break
i += 1
if i >= numFields:
NoFieldError += (SObject+" does not contain : "+col+"\n")
SFNulls(df)
if EnforceNulls == False:
for col in NumCol:
df[col] = df[col].replace('#N/A','')
errors = NoFieldError+InvalidDataError
if len(errors) > 0:
return(errors)
else:
return('No Errors')
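# A short sketch of formatting a DataFrame before upload; the 'Contact' object
# and the DataFrame contents are assumptions for illustration.
def _example_sfformat_usage(df):
    errors = SFFormat(df, 'Contact', EnforceNulls=False)
    if errors != 'No Errors':
        print(errors)
    return df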
def SFUpload(df, UploadType, Sobject, batchSize=49995, hangtime=0):
"""
Description: Upload a pandas dataframe through the Salesforce Bulk API in batches of 50k. Can run either an insert or update to the listed Sobject. Sobject and UploadType must be listed as a string. ex: 'Update', 'Account'
Parameters:
df = Pandas.DataFrame
UploadType = Update or Insert
Sobject = Salesforce object in the upload. Ex - Accounts, Contact
batchSize = Number of rows that the upload will run before submitting the next group of rows in the dataset. Defaults to 49,995 (5 batches of 9999)
hangtime = Number of seconds to wait before uploading a new batch. Defaults to 0.
"""
if len(df) == 0:
return
startRow = 0
endRow = batchSize
while startRow < len(df):
upload = df[startRow:endRow]
Headers = upload.columns.tolist()
Data = upload.to_records(index=False)
job = SalesforceBulkJob(UploadType, Sobject, salesforce=sf)
job.upload(Headers,Data)
startRow = endRow
endRow = startRow + batchSize
time.sleep(hangtime)
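# Sketch of chaining SFFormat and SFUpload; the 'Account' object and batch size
# are assumptions, not requirements of this module.
def _example_sfupload_usage(df):
    if SFFormat(df, 'Account') == 'No Errors':
        SFUpload(df, 'Update', 'Account', batchSize=10000)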
def SFBulkQuery(SObject, SOQL):
"""
    Description: Runs a query through the bulk api. Creates, Tracks, and Closes the Request and returns the results as a Pandas Dataframe. Currently there are lots of slightly obnoxious messages to help with tracking the current status.
Parameters:
SObject = Salesforce Object, ex: Account, Contact
SOQL = Salesforce SOQL Statement for bulk query
"""
sfbulk = SalesforceBulk(sessionId=sf.session_id, host=sf.sf_instance)
job = sfbulk.create_query_job(SObject, contentType='CSV')
batch = sfbulk.query(job, SOQL)
while not sfbulk.is_batch_done(job, batch):
time.sleep(10)
sfbulk.close_job(job)
res = sfbulk.get_batch_result_iter(job, batch)
return res
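# Sketch of consuming SFBulkQuery output; the SOQL statement is an assumption.
# get_batch_result_iter yields raw result rows, so parsing is left to the caller.
def _example_sfbulkquery_usage():
    for row in SFBulkQuery('Account', 'SELECT Id, Name FROM Account'):
        print(row)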
| mit |
Eigenstate/msmbuilder | msmbuilder/commands/implied_timescales.py | 12 | 5214 | # Author: Robert McGibbon <[email protected]>
# Contributors:
# Copyright (c) 2014, Stanford University
# All rights reserved.
"""Scan the implied timescales of MarkovStateModels with respect to lag time.
This command will build a series of MarkovStateModels at different lag times,
and save a file to disk containing the relaxation timescales of each of the
models.
A plot of these data can then be used to choose the lag time [1].
References
----------
.. [1] Beauchamp, Kyle A., et al. "MSMBuilder2: modeling conformational
dynamics on the picosecond to millisecond scale." J. Chem. Theory.
Comput. 7.10 (2011): 3412-3419.
"""
# -----------------------------------------------------------------------------
# Imports
# -----------------------------------------------------------------------------
from __future__ import print_function, division, absolute_import
from os.path import splitext
import sys
import json
import pandas as pd
from ..dataset import dataset
from ..cmdline import Command, argument, argument_group, rangetype, FlagAction
from ..msm import MarkovStateModel, implied_timescales
class ImpliedTimescales(Command):
_group = 'MSM'
_concrete = True
description = __doc__
lag_times = argument('-l', '--lag_times', default='1:10', help='''Range
of lag times. Specify as 'start:stop' or 'start:stop:step. The
endpoints are inclusive.''', type=rangetype)
inp = argument(
'-i', '--inp', help='''Path to input dataset, a collection of 1D
integer sequences (such as the output from clustering)''',
required=True)
out = argument('--out', help='''Output file''',
default='timescales.csv')
fmt = argument('--fmt', help='Output file format', default='csv',
choices=('csv', 'json', 'excel'))
_extensions = {'csv': '.csv', 'json': '.json', 'excel': '.xlsx'}
n_jobs = argument('--n_jobs', help='Number of parallel processes',
default=1, type=int)
p = argument_group('MSM parameters')
n_timescales = p.add_argument('--n_timescales', default=10, help='''
The number of dynamical timescales to calculate when diagonalizing
the transition matrix.''', type=int)
reversible_type = p.add_argument('--reversible_type', help='''
Method by which the reversibility of the transition matrix
is enforced. 'mle' uses a maximum likelihood method that is
solved by numerical optimization, and 'transpose'
uses a more restrictive (but less computationally complex)
direct symmetrization of the expected number of counts.''',
choices=('mle', 'transpose'), default='mle')
ergodic_cutoff = p.add_argument('--ergodic_cutoff', default=1, help='''
Only the maximal strongly ergodic subgraph of the data is used to build
an MSM. Ergodicity is determined by ensuring that each state is
accessible from each other state via one or more paths involving edges
with a number of observed directed counts greater than or equal to
        ``ergodic_cutoff``. Note that by setting ``ergodic_cutoff`` to 0, this
trimming is effectively turned off.''', type=int)
prior_counts = p.add_argument('--prior_counts', help='''Add a number
of "pseudo counts" to each entry in the counts matrix. When
prior_counts == 0 (default), the assigned transition probability
between two states with no observed transitions will be zero, whereas
when prior_counts > 0, even this unobserved transitions will be
given nonzero probability.''', type=float, default=0)
verbose = p.add_argument('--verbose', default=True,
help='Enable verbose printout', action=FlagAction)
def __init__(self, args):
self.args = args
def start(self):
kwargs = {
'n_timescales': self.args.n_timescales,
'reversible_type': self.args.reversible_type,
'ergodic_cutoff': self.args.ergodic_cutoff,
'prior_counts': self.args.prior_counts,
'verbose': self.args.verbose,
}
with dataset(self.args.inp, mode='r') as ds:
model = MarkovStateModel(**kwargs)
lines = implied_timescales(
ds, lag_times=self.args.lag_times,
n_timescales=self.args.n_timescales,
msm=model,
n_jobs=self.args.n_jobs,
verbose=self.args.verbose)
cols = ['Timescale %d' % (d+1) for d in range(len(lines[0]))]
df = pd.DataFrame(data=lines, columns=cols)
df['Lag Time'] = self.args.lag_times
df = df.reindex_axis(sorted(df.columns), axis=1)
self.write_output(df)
def write_output(self, df):
outfile = splitext(self.args.out)[0] + self._extensions[self.args.fmt]
print('Writing %s' % outfile)
if self.args.fmt == 'csv':
df.to_csv(outfile)
elif self.args.fmt == 'json':
with open(outfile, 'w') as f:
json.dump(df.to_dict(orient='records'), f)
elif self.args.fmt == 'excel':
df.to_excel(outfile)
else:
            raise RuntimeError('unknown fmt: %s' % self.args.fmt)
print('All done!')
| lgpl-2.1 |
cpcloud/ibis | ibis/expr/tests/test_case.py | 3 | 3082 | import pytest
import ibis
import ibis.expr.datatypes as dt
import ibis.expr.operations as ops
import ibis.expr.types as ir
from ibis.tests.util import assert_equal
def test_ifelse(table):
bools = table.g.isnull()
result = bools.ifelse("foo", "bar")
assert isinstance(result, ir.StringColumn)
@pytest.mark.xfail(raises=AssertionError, reason='NYT')
def test_ifelse_literal():
assert False
def test_simple_case_expr(table):
case1, result1 = "foo", table.a
case2, result2 = "bar", table.c
default_result = table.b
expr1 = table.g.lower().cases(
[(case1, result1), (case2, result2)], default=default_result
)
expr2 = (
table.g.lower()
.case()
.when(case1, result1)
.when(case2, result2)
.else_(default_result)
.end()
)
assert_equal(expr1, expr2)
assert isinstance(expr1, ir.IntegerColumn)
def test_multiple_case_expr(table):
case1 = table.a == 5
case2 = table.b == 128
case3 = table.c == 1000
result1 = table.f
result2 = table.b * 2
result3 = table.e
default = table.d
expr = (
ibis.case()
.when(case1, result1)
.when(case2, result2)
.when(case3, result3)
.else_(default)
.end()
)
op = expr.op()
assert isinstance(expr, ir.FloatingColumn)
assert isinstance(op, ops.SearchedCase)
assert op.default is default
@pytest.mark.xfail(raises=AssertionError, reason='NYT')
def test_simple_case_no_default():
# TODO: this conflicts with the null else cases below. Make a decision
# about what to do, what to make the default behavior based on what the
# user provides. SQL behavior is to use NULL when nothing else
# provided. The .replace convenience API could use the field values as
# the default, getting us around this issue.
assert False
def test_simple_case_null_else(table):
expr = table.g.case().when("foo", "bar").end()
op = expr.op()
assert isinstance(expr, ir.StringColumn)
assert isinstance(op.default, ir.ValueExpr)
assert isinstance(op.default.op(), ops.Cast)
assert op.default.op().to == dt.string
def test_multiple_case_null_else(table):
expr = ibis.case().when(table.g == "foo", "bar").end()
op = expr.op()
assert isinstance(expr, ir.StringColumn)
assert isinstance(op.default, ir.ValueExpr)
assert isinstance(op.default.op(), ops.Cast)
assert op.default.op().to == dt.string
@pytest.mark.xfail(raises=AssertionError, reason='NYT')
def test_case_type_precedence():
assert False
@pytest.mark.xfail(raises=AssertionError, reason='NYT')
def test_no_implicit_cast_possible():
assert False
def test_case_mixed_type():
t0 = ibis.table(
[('one', 'string'), ('two', 'double'), ('three', 'int32')],
name='my_data',
)
expr = (
t0.three.case()
.when(0, 'low')
.when(1, 'high')
.else_('null')
.end()
.name('label')
)
result = t0[expr]
assert result['label'].type().equals(dt.string)
| apache-2.0 |
lukeiwanski/tensorflow | tensorflow/python/kernel_tests/xent_op_test.py | 17 | 13895 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for SoftmaxCrossEntropyWithLogits op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import sys
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_nn_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
class XentTest(test.TestCase):
def _npXent(self, features, labels, dim=-1):
    if dim == -1:
dim = len(features.shape) - 1
one_only_on_dim = list(features.shape)
one_only_on_dim[dim] = 1
e = np.exp(
features - np.reshape(np.amax(features, axis=dim), one_only_on_dim))
probs = e / np.reshape(np.sum(e, axis=dim), one_only_on_dim)
bp = (probs - labels)
l = -np.sum(labels * np.log(probs + 1.0e-20), axis=dim)
return l, bp
def _testXent(self, np_features, np_labels, use_gpu=False):
np_loss, np_backprop = self._npXent(np_features, np_labels)
with self.test_session(use_gpu=use_gpu) as sess:
loss, backprop = gen_nn_ops.softmax_cross_entropy_with_logits(
np_features, np_labels)
tf_loss, tf_backprop = sess.run([loss, backprop])
self.assertAllCloseAccordingToType(np_loss, tf_loss)
self.assertAllCloseAccordingToType(np_backprop, tf_backprop)
def _testXentWrapper(self, np_features, np_labels, dim=-1, use_gpu=False):
np_loss, _ = self._npXent(np_features, np_labels, dim=dim)
with self.test_session(use_gpu=use_gpu) as sess:
loss = nn_ops.softmax_cross_entropy_with_logits(
labels=np_labels, logits=np_features, dim=dim)
tf_loss = sess.run(loss)
print("np_loss:", np_loss)
print("tf_loss:", tf_loss)
self.assertAllCloseAccordingToType(np_loss, tf_loss)
def _testAll(self, features, labels):
self._testXent(features, labels, use_gpu=False)
self._testXent(features, labels, use_gpu=True)
def _testSingleClass(self, use_gpu=False):
for dtype in np.float16, np.float32:
with self.test_session(use_gpu=use_gpu) as sess:
loss, backprop = gen_nn_ops.softmax_cross_entropy_with_logits(
np.array([[1.], [-1.], [0.]]).astype(dtype),
np.array([[-1.], [0.], [1.]]).astype(dtype))
tf_loss, tf_backprop = sess.run([loss, backprop])
self.assertAllClose([0.0, 0.0, 0.0], tf_loss)
self.assertAllClose([[2.0], [1.0], [0.0]], tf_backprop)
def testSingleClass(self):
self._testSingleClass(True)
self._testSingleClass(False)
def testRankTooLarge(self):
for dtype in np.float16, np.float32:
np_features = np.array([[[1., 1., 1., 1.]], [[1., 2., 3.,
4.]]]).astype(dtype)
np_labels = np.array([[[0., 0., 0., 1.]], [[0., .5, .5,
0.]]]).astype(dtype)
self.assertRaisesRegexp(ValueError, "rank 2, but is rank 3",
gen_nn_ops.softmax_cross_entropy_with_logits,
np_features, np_labels)
def testNpXent(self):
# We create 2 batches of logits for testing.
# batch 0 is the boring uniform distribution: 1, 1, 1, 1, with target 3.
# batch 1 has a bit of difference: 1, 2, 3, 4, with soft targets (1, 2).
features = [[1., 1., 1., 1.], [1., 2., 3., 4.]]
labels = [[0., 0., 0., 1.], [0., .5, .5, 0.]]
# For batch 0, we expect the uniform distribution: 0.25, 0.25, 0.25, 0.25
# With a hard target 3, the backprop is [0.25, 0.25, 0.25, -0.75]
# The loss for this batch is -log(0.25) = 1.386
#
# For batch 1, we have:
# exp(0) = 1
# exp(1) = 2.718
# exp(2) = 7.389
# exp(3) = 20.085
# SUM = 31.192
# So we have as probabilities:
# exp(0) / SUM = 0.032
# exp(1) / SUM = 0.087
# exp(2) / SUM = 0.237
# exp(3) / SUM = 0.644
# With a soft target (1, 2), the backprop is
# [0.032, 0.087 - 0.5 = -0.413, 0.237 - 0.5 = -0.263, 0.644]
# The loss for this batch is [0.5 * -log(0.087), 0.5 * -log(0.237)]
# = [1.3862, 1.9401]
np_loss, np_backprop = self._npXent(np.array(features), np.array(labels))
self.assertAllClose(
np.array([[0.25, 0.25, 0.25, -0.75], [0.0321, -0.4129, -0.2632,
0.6439]]),
np_backprop,
rtol=1.e-3,
atol=1.e-3)
self.assertAllClose(
np.array([1.3862, 1.9401]), np_loss, rtol=1.e-3, atol=1.e-3)
def testShapeBroadcast(self):
np_f = np.array([[1., 2., 3., 4.],
[1., 2., 3., 4.]]).astype(np.float32)
np_l = np.array([[0., 0., 0., 1.],
[0., .5, .5, 0.]]).astype(np.float32)
np_loss, np_backprop = self._npXent(np_f, np_l)
tf_f = constant_op.constant(
np.array([[1., 2., 3., 4.]]).astype(np.float32))
tf_l = constant_op.constant(
np.array([[0., 0., 0., 1.], [0., .5, .5, 0.]]).astype(np.float32))
for use_gpu in [False, True]:
with self.test_session(use_gpu=use_gpu) as sess:
loss, backprop = gen_nn_ops.softmax_cross_entropy_with_logits(
tf_f, tf_l)
tf_loss, tf_backprop = sess.run([loss, backprop])
self.assertAllCloseAccordingToType(np_loss, tf_loss)
self.assertAllCloseAccordingToType(np_backprop, tf_backprop)
def testShapeMismatch(self):
with self.test_session():
with self.assertRaises(ValueError):
gen_nn_ops.softmax_cross_entropy_with_logits(
[[0., 1.], [2., 3.]], [[0., 1., 0.], [1., 0., 0.]])
def testNotMatrix(self):
with self.test_session():
with self.assertRaises(ValueError):
gen_nn_ops.softmax_cross_entropy_with_logits([0., 1., 2., 3.],
[0., 1., 0., 1.])
def testHalf(self):
self._testAll(
np.array([[1., 1., 1., 1.], [1., 2., 3., 4.]]).astype(np.float16),
np.array([[0., 0., 0., 1.], [0., .5, .5, 0.]]).astype(np.float16))
def testFloat(self):
self._testAll(
np.array([[1., 1., 1., 1.], [1., 2., 3., 4.]]).astype(np.float32),
np.array([[0., 0., 0., 1.], [0., .5, .5, 0.]]).astype(np.float32))
def testDouble(self):
self._testAll(
np.array([[1., 1., 1., 1.], [1., 2., 3., 4.]]).astype(np.float64),
np.array([[0., 0., 0., 1.], [0., .5, .5, 0.]]).astype(np.float64))
def testGradient(self):
with self.test_session() as sess:
l = constant_op.constant(
[0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.5],
shape=[3, 4],
dtype=dtypes.float64,
name="l")
f = constant_op.constant(
[0.1, 0.2, 0.3, 0.4, 0.1, 0.4, 0.9, 1.6, 0.1, 0.8, 2.7, 6.4],
shape=[3, 4],
dtype=dtypes.float64,
name="f")
x = nn_ops.softmax_cross_entropy_with_logits(
labels=l, logits=f, name="xent")
err = gradient_checker.compute_gradient_error(f, [3, 4], x, [3])
      # Check that no extra computation was performed: when only the first
      # derivative is requested, the second derivative must not be computed,
      # so the graph should contain no `BatchMatMul` op.
op_names = [
op.op_def.name for op in sess.graph.get_operations() if op.op_def
]
self.assertNotIn("BatchMatMul", op_names)
print("cross entropy gradient err = ", err)
self.assertLess(err, 5e-8)
def testGradientLabelWithV2(self):
with self.test_session():
l = constant_op.constant(
[0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.5],
shape=[3, 4],
dtype=dtypes.float64,
name="l")
f = constant_op.constant(
[0.1, 0.2, 0.3, 0.4, 0.1, 0.4, 0.9, 1.6, 0.1, 0.8, 2.7, 6.4],
shape=[3, 4],
dtype=dtypes.float64,
name="f")
x = nn_ops.softmax_cross_entropy_with_logits_v2(
labels=l, logits=f, name="xent")
err = gradient_checker.compute_gradient_error(l, [3, 4], x, [3])
self.assertLess(err, 5e-8)
def testSecondGradient(self):
with self.test_session() as sess:
l = constant_op.constant(
[
0.0, 0.0, 1.0 / 3, 0.0, 1.0 / 3, 0.0, 0.0, 0.0, 0.0, 0.5 / 3, 0.0,
0.5 / 3
],
shape=[12],
dtype=dtypes.float64,
name="l")
f = constant_op.constant(
[0.1, 0.2, 0.3, 0.4, 0.1, 0.4, 0.9, 1.6, 0.1, 0.8, 2.7, 6.4],
shape=[12],
dtype=dtypes.float64,
name="f")
x = nn_ops.softmax_cross_entropy_with_logits(
labels=l, logits=f, name="xent")
loss = math_ops.reduce_sum(x)
gradients = gradients_impl.gradients(loss, [f])[0]
err = gradient_checker.compute_gradient_error(f, [12], gradients, [12])
      # Check that the second derivative is calculated: because of how the
      # xentropy gradient is implemented, it shows up as a `BatchMatMul` op in
      # the graph.
op_names = [
op.op_def.name for op in sess.graph.get_operations() if op.op_def
]
self.assertIn("BatchMatMul", op_names)
print("cross entropy hessian err = ", err)
self.assertLess(err, 5e-8)
def testWrapper(self):
features = np.array([[[1., 1., 1., 1.], [1., 2., 3., 4.]],
[[2., 3., 4., 5.], [6., 7., 8., 9.]],
[[5., 4., 3., 2.], [1., 2., 3., 4.]]]).astype(
np.float32)
labels = np.array([[[0., 0., 0., 1.], [0., 1., 0., 0.]],
[[0., 0.5, 0.5, 0.], [0.5, 0.5, 0., 0.]],
[[0., 1., 0., 0.], [0., 0., 1., 0.]]]).astype(
np.float32)
self._testXentWrapper(features, labels, dim=0, use_gpu=False)
self._testXentWrapper(features, labels, dim=0, use_gpu=True)
self._testXentWrapper(features, labels, dim=1, use_gpu=False)
self._testXentWrapper(features, labels, dim=1, use_gpu=True)
self._testXentWrapper(features, labels, dim=-1, use_gpu=False)
self._testXentWrapper(features, labels, dim=-1, use_gpu=True)
def testZeroDimension(self):
features = np.zeros([0, 2, 4]).astype(np.float32)
labels = np.zeros([0, 2, 4]).astype(np.float32)
np_loss, _ = self._npXent(features, labels)
with self.test_session(use_gpu=True) as sess:
loss = nn_ops.softmax_cross_entropy_with_logits(
labels=labels, logits=features)
tf_loss = sess.run(loss)
self.assertAllEqual(np_loss, tf_loss)
class XentBenchmark(test.Benchmark):
def benchmarkZeroDimension(self):
for (m, n, p, use_gpu) in itertools.product(
[128],
[10, 100, 1000, 10000, 100000],
[0.001, 0.01, 0.5, 0.99, 1.0],
[False]):
k = int(p * n)
if k == 0:
continue
name = "zero_dimension_m_%d_n_%d_k_%g_use_gpu_%s" % (m, n, k, use_gpu)
device = "/%s:0" % ("gpu" if use_gpu else "cpu")
with ops.Graph().as_default():
with ops.device(device):
labels = array_ops.zeros([0, 2, 4], dtype=dtypes.float32)
logits = array_ops.zeros([0, 2, 4], dtype=dtypes.float32)
op = nn_ops.softmax_cross_entropy_with_logits(
labels=labels, logits=logits)
with session.Session() as sess:
r = self.run_op_benchmark(sess, op, min_iters=100, name=name)
gb_processed_input = m * n / 1.0e9
throughput = gb_processed_input / r["wall_time"]
print("Benchmark: %s \t wall_time: %0.03g s \t "
"Throughput: %0.03g GB/s" % (name, r["wall_time"], throughput))
sys.stdout.flush()
def benchmarkSingleClass(self):
for (m, n, p, use_gpu) in itertools.product(
[128],
[10, 100, 1000, 10000, 100000],
[0.001, 0.01, 0.5, 0.99, 1.0],
[False]):
k = int(p * n)
if k == 0:
continue
name = "single_class_m_%d_n_%d_k_%g_use_gpu_%s" % (m, n, k, use_gpu)
device = "/%s:0" % ("gpu" if use_gpu else "cpu")
with ops.Graph().as_default():
with ops.device(device):
labels = constant_op.constant([[1.], [-1.], [0.]],
dtype=dtypes.float32)
logits = constant_op.constant([[-1.], [0.], [1.]],
dtype=dtypes.float32)
op = nn_ops.softmax_cross_entropy_with_logits(
labels=labels, logits=logits)
with session.Session() as sess:
r = self.run_op_benchmark(sess, op, min_iters=100, name=name)
gb_processed_input = m * n / 1.0e9
throughput = gb_processed_input / r["wall_time"]
print("Benchmark: %s \t wall_time: %0.03g s \t "
"Throughput: %0.03g GB/s" % (name, r["wall_time"], throughput))
sys.stdout.flush()
if __name__ == "__main__":
test.main()
| apache-2.0 |
nwjs/chromium.src | third_party/blink/tools/blinkpy/common/system/system_host_mock.py | 2 | 3073 | # Copyright (c) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from StringIO import StringIO
from blinkpy.common.system.executive_mock import MockExecutive
from blinkpy.common.system.filesystem_mock import MockFileSystem
from blinkpy.common.system.platform_info_mock import MockPlatformInfo
from blinkpy.common.system.user_mock import MockUser
class MockSystemHost(object):
def __init__(self,
log_executive=False,
os_name=None,
os_version=None,
executive=None,
filesystem=None,
time_return_val=123):
self.executable = 'python'
self.executive = executive or MockExecutive(should_log=log_executive)
self.filesystem = filesystem or MockFileSystem()
self.user = MockUser()
self.platform = MockPlatformInfo()
if os_name:
self.platform.os_name = os_name
if os_version:
self.platform.os_version = os_version
self.stdin = StringIO()
self.stdout = StringIO()
self.stderr = StringIO()
self.environ = {
'MOCK_ENVIRON_COPY': '1',
'PATH': '/bin:/mock/bin'
}
self.time_return_val = time_return_val
def time(self):
return self.time_return_val
def sleep(self, seconds):
self.time_return_val += seconds
def print_(self, *args, **kwargs):
sep = kwargs.get('sep', ' ')
end = kwargs.get('end', '\n')
stream = kwargs.get('stream', self.stdout)
stream.write(sep.join([str(arg) for arg in args]) + end)
| bsd-3-clause |
cdorer/crits | crits/indicators/urls.py | 7 | 1184 | from django.conf.urls import patterns
urlpatterns = patterns('crits.indicators.views',
(r'^details/(?P<indicator_id>\w+)/$', 'indicator'),
(r'^search/$', 'indicator_search'),
(r'^upload/$', 'upload_indicator'),
(r'^add_action/$', 'new_indicator_action'),
(r'^remove/(?P<_id>[\S ]+)$', 'remove_indicator'),
(r'^action/remove/(?P<indicator_id>\w+)/$', 'remove_action'),
(r'^activity/remove/(?P<indicator_id>\w+)/$', 'remove_activity'),
(r'^actions/(?P<method>\S+)/(?P<indicator_id>\w+)/$', 'add_update_action'),
(r'^activity/(?P<method>\S+)/(?P<indicator_id>\w+)/$', 'add_update_activity'),
(r'^ci/update/(?P<indicator_id>\w+)/(?P<ci_type>\S+)/$', 'update_ci'),
(r'^type/update/(?P<indicator_id>\w+)/$', 'update_indicator_type'),
(r'^threat_type/update/(?P<indicator_id>\w+)/$', 'update_indicator_threat_type'),
(r'^attack_type/update/(?P<indicator_id>\w+)/$', 'update_indicator_attack_type'),
(r'^and_ip/$', 'indicator_and_ip'),
(r'^from_obj/$', 'indicator_from_tlo'),
(r'^list/$', 'indicators_listing'),
(r'^list/(?P<option>\S+)/$', 'indicators_listing'),
(r'^get_dropdown/$', 'get_indicator_type_dropdown'),
)
| mit |
milkpku/BetaElephant | policy_experiment/policy.orign/dataset.py | 1 | 7607 | #!/usr/bin/python3
#-*-coding:utf-8-*-
#$File: dataset.py
#$Date: Sat May 7 10:59:24 2016
#$Author: Like Ma <milkpku[at]gmail[dot]com>
import copy
import random
import numpy as np
OUT_TYPE = np.float32
class Dataset(object):
def __init__(self, path, _type):
if _type == 'train':
self.__file_object = open(path + '/train.fen', 'r')
if _type == 'validation':
self.__file_object = open(path + '/valid.fen', 'r')
self.__chesslayer = {}
def __init_clayer(self):
# King(帅)*1, Advisor(仕)*2, Bishop(象)*2, kNight(马)*2
# Rook(车)*2, Cannon(炮)*2, Pawn(兵)*5
# Upper: red Lower: black
self.__chesslayer = {'K':0, 'A':1, 'B':3, 'N':5, 'R':7, 'C':9, 'P':11,
'k':0, 'a':1, 'b':3, 'n':5, 'r':7, 'c':9, 'p':11}
def next_batch(self, batch_size):
'''
return [data, label] with batched size
'''
frdpos = np.zeros((batch_size, 9, 10, 16), dtype=OUT_TYPE)
emypos = np.zeros((batch_size, 9, 10, 16), dtype=OUT_TYPE)
frdmove = np.zeros((batch_size, 9, 10, 16), dtype=OUT_TYPE)
emymove = np.zeros((batch_size, 9, 10, 16), dtype=OUT_TYPE)
frdprot = np.zeros((batch_size, 9, 10, 16), dtype=OUT_TYPE)
emyprot = np.zeros((batch_size, 9, 10, 16), dtype=OUT_TYPE)
movelabel = np.zeros((batch_size, 9, 10, 16), dtype=OUT_TYPE)
i = 0
while(i < batch_size):
line = self.__file_object.readline()
if line != '':
if line.split('\t')[2] == 'WIN!':
continue
else:
self.__file_object.seek(0, 0)
line = self.__file_object.readline()
frdpos[i], emypos[i], frdmove[i], emymove[i], frdprot[i], emyprot[i], movelabel[i] = self.__fen2tensor(line)
i += 1
return [frdpos, emypos, frdmove], movelabel
# return [frdpos, frdmove, emypos, emyprot], movelabel
# return [frdpos, emypos, frdmove, emymove, frdprot, emyprot], movelabel
def __fen2tensor(self, fen):
frdpos = np.zeros((9, 10, 16), dtype=OUT_TYPE)
emypos = np.zeros((9, 10, 16), dtype=OUT_TYPE)
frdmove = np.zeros((9, 10, 16), dtype=OUT_TYPE)
emymove = np.zeros((9, 10, 16), dtype=OUT_TYPE)
frdprot = np.zeros((9, 10, 16), dtype=OUT_TYPE)
emyprot = np.zeros((9, 10, 16), dtype=OUT_TYPE)
movelabel = np.zeros((9, 10, 16), dtype=OUT_TYPE)
fenlist = fen.split('\t')
frdpos, emypos = self.__f2tpos(fenlist[0], frdpos, emypos)
frdmove = self.__f2tmove(fenlist[1], frdmove, frdpos)
emymove = self.__f2tmove(fenlist[3], emymove, emypos)
frdprot = self.__f2tmove(fenlist[4], frdprot, frdpos)
emyprot = self.__f2tmove(fenlist[5], emyprot, emypos)
label = fenlist[2].strip().split('-')
layer = np.argmax(frdpos[self.__loca2i(label[0][0])][self.__loca2i(label[0][1])])
movelabel[self.__loca2i(label[1][0])][self.__loca2i(label[1][1])][layer] = 1
if fenlist[0].split()[1] == 'b':
self.__switch_round(frdpos)
self.__switch_round(frdmove)
self.__switch_round(emypos)
self.__switch_round(movelabel)
# shuffle random
self.__shuffle([frdpos, frdmove, movelabel], self.__shuffle_args())
self.__shuffle([emypos], self.__shuffle_args())
return frdpos, emypos, frdmove, emymove, frdprot, emyprot, movelabel
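    # Record layout consumed by __fen2tensor above: one tab-separated line per
    # position (descriptions inferred from the indices used, not a formal spec):
    #   field 0: board FEN plus side to move    field 3: enemy moves
    #   field 1: friendly moves ("src-des")     field 4: friendly protections
    #   field 2: chosen move label ("src-des")  field 5: enemy protections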
def __f2tpos(self, fen, frdpos, emypos):
self.__init_clayer()
poslist = fen.split()[0].split('/')
player = fen.split()[1]
for i in range(len(poslist)):
item = poslist[9 - i]
index = 0
for j in range(len(item)):
if item[j].isupper():
if player == 'w':
frdpos[index][i][self.__chesslayer[item[j]]] = 1
else:
emypos[index][i][self.__chesslayer[item[j]]] = 1
self.__chesslayer[item[j]] += 1
index += 1
elif item[j].islower():
if player == 'w':
emypos[index][i][self.__chesslayer[item[j]]] = 1
else:
frdpos[index][i][self.__chesslayer[item[j]]] = 1
self.__chesslayer[item[j]] += 1
index += 1
else:
index += int(item[j])
return frdpos, emypos
def __f2tmove(self, movelist, move, pos):
movelist = movelist.split()
for item in movelist:
src = item.split('-')[0]
des = item.split('-')[1]
layer = np.argmax(pos[self.__loca2i(src[0])][self.__loca2i(src[1])])
move[self.__loca2i(des[0])][self.__loca2i(des[1])][layer] = 1
return move
def __loca2i(self, loc):
if loc.isupper():
return ord(loc)-ord('A')
else:
return int(loc)
def __switch_round(self, mat):
mat = mat[:,::-1,:]
def __shuffle(self, mat, args):
index = [[1,2],[3,4],[5,6],[7,8],[9,10],[11,12,13,14,15]]
for item in mat:
for i in range(len(index)):
item[:,:,index[i]] = self.__switch_layer(item[:,:,index[i]], args[i])
def __switch_layer(self, mat, args):
mat_temp = copy.deepcopy(mat)
assert len(args) == mat.shape[2]
for k in range(len(args)):
mat[:,:,k] = mat_temp[:,:,args[k]]
return mat
def __shuffle_args(self):
args = []
for i in range(5):
a = [0,1]
random.shuffle(a)
args.append(a)
seq = [0,1,2,3,4]
random.shuffle(seq)
args.append(seq)
return args
def load_data(_type):
'''
return dataset which yeild minibatch data
'''
data = Dataset('/home/mlk/BetaElephant/data', _type)
return data
def visualdata(data):
print('------------------------')
for i in range(data.shape[2]):
print(i)
for j in range(data.shape[1]):
for k in range(data.shape[0]):
print(int(data[k][9 - j][i])),
print('\n'),
print('\n')
print('------------------------\n')
if __name__ == '__main__':
traindata = load_data('validation')
for i in range(10):
        [frdpos, emypos, frdmove, emymove, frdprot, emyprot], movelabel = traindata.next_batch(10)
if 0:
visualdata(frdpos[0])
visualdata(frdmove[0])
visualdata(emypos[0])
for i in range(10):
[frdpos, emypos, frdmove, emymove, frdprot, emyprot], movelabel = traindata.next_batch(100)
# from IPython import embed; embed()
# assert all protected pieces are selfpieces
assert all((frdprot.sum(axis=3)*frdpos.sum(axis=3)==frdprot.sum(axis=3)).reshape(-1))
assert all((emyprot.sum(axis=3)*emypos.sum(axis=3)==emyprot.sum(axis=3)).reshape(-1))
# assert no empty moves
frdmove = frdmove.reshape(frdmove.shape[0], -1)
frdmove = frdmove.sum(axis=1)
assert all(frdmove!=0), print(i, np.argwhere(frdmove==0))
# assert no piece in the same layer
frdpos = frdpos.reshape(frdpos.shape[0]*90, -1)
frdpos = frdpos.sum(axis=1)
assert all(frdpos < 2), print(i, np.argwhere(frdpos>1))
emypos = emypos.reshape(emypos.shape[0]*90, -1)
emypos = emypos.sum(axis=1)
assert all(emypos < 2), print(i, np.argwhere(emypos>1))
| mit |
Dhivyap/ansible | lib/ansible/modules/network/fortios/fortios_log_memory_filter.py | 14 | 20935 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_log_memory_filter
short_description: Filters for memory buffer in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify log_memory feature and filter category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
log_memory_filter:
description:
- Filters for memory buffer.
default: null
type: dict
suboptions:
admin:
description:
- Enable/disable admin login/logout logging.
type: str
choices:
- enable
- disable
anomaly:
description:
- Enable/disable anomaly logging.
type: str
choices:
- enable
- disable
auth:
description:
- Enable/disable firewall authentication logging.
type: str
choices:
- enable
- disable
cpu_memory_usage:
description:
- Enable/disable CPU & memory usage logging every 5 minutes.
type: str
choices:
- enable
- disable
dhcp:
description:
- Enable/disable DHCP service messages logging.
type: str
choices:
- enable
- disable
dns:
description:
- Enable/disable detailed DNS event logging.
type: str
choices:
- enable
- disable
event:
description:
- Enable/disable event logging.
type: str
choices:
- enable
- disable
filter:
description:
- Memory log filter.
type: str
filter_type:
description:
- Include/exclude logs that match the filter.
type: str
choices:
- include
- exclude
forward_traffic:
description:
- Enable/disable forward traffic logging.
type: str
choices:
- enable
- disable
gtp:
description:
- Enable/disable GTP messages logging.
type: str
choices:
- enable
- disable
ha:
description:
- Enable/disable HA logging.
type: str
choices:
- enable
- disable
ipsec:
description:
- Enable/disable IPsec negotiation messages logging.
type: str
choices:
- enable
- disable
ldb_monitor:
description:
- Enable/disable VIP real server health monitoring logging.
type: str
choices:
- enable
- disable
local_traffic:
description:
- Enable/disable local in or out traffic logging.
type: str
choices:
- enable
- disable
multicast_traffic:
description:
- Enable/disable multicast traffic logging.
type: str
choices:
- enable
- disable
netscan_discovery:
description:
- Enable/disable netscan discovery event logging.
type: str
netscan_vulnerability:
description:
- Enable/disable netscan vulnerability event logging.
type: str
pattern:
description:
- Enable/disable pattern update logging.
type: str
choices:
- enable
- disable
ppp:
description:
- Enable/disable L2TP/PPTP/PPPoE logging.
type: str
choices:
- enable
- disable
radius:
description:
- Enable/disable RADIUS messages logging.
type: str
choices:
- enable
- disable
severity:
description:
- Log every message above and including this severity level.
type: str
choices:
- emergency
- alert
- critical
- error
- warning
- notification
- information
- debug
sniffer_traffic:
description:
- Enable/disable sniffer traffic logging.
type: str
choices:
- enable
- disable
ssh:
description:
- Enable/disable SSH logging.
type: str
choices:
- enable
- disable
sslvpn_log_adm:
description:
- Enable/disable SSL administrator login logging.
type: str
choices:
- enable
- disable
sslvpn_log_auth:
description:
- Enable/disable SSL user authentication logging.
type: str
choices:
- enable
- disable
sslvpn_log_session:
description:
- Enable/disable SSL session logging.
type: str
choices:
- enable
- disable
system:
description:
- Enable/disable system activity logging.
type: str
choices:
- enable
- disable
vip_ssl:
description:
- Enable/disable VIP SSL logging.
type: str
choices:
- enable
- disable
voip:
description:
- Enable/disable VoIP logging.
type: str
choices:
- enable
- disable
wan_opt:
description:
- Enable/disable WAN optimization event logging.
type: str
choices:
- enable
- disable
wireless_activity:
description:
- Enable/disable wireless activity event logging.
type: str
choices:
- enable
- disable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Filters for memory buffer.
fortios_log_memory_filter:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
log_memory_filter:
admin: "enable"
anomaly: "enable"
auth: "enable"
cpu_memory_usage: "enable"
dhcp: "enable"
dns: "enable"
event: "enable"
filter: "<your_own_value>"
filter_type: "include"
forward_traffic: "enable"
gtp: "enable"
ha: "enable"
ipsec: "enable"
ldb_monitor: "enable"
local_traffic: "enable"
multicast_traffic: "enable"
netscan_discovery: "<your_own_value>"
netscan_vulnerability: "<your_own_value>"
pattern: "enable"
ppp: "enable"
radius: "enable"
severity: "emergency"
sniffer_traffic: "enable"
ssh: "enable"
sslvpn_log_adm: "enable"
sslvpn_log_auth: "enable"
sslvpn_log_session: "enable"
system: "enable"
vip_ssl: "enable"
voip: "enable"
wan_opt: "enable"
wireless_activity: "enable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_log_memory_filter_data(json):
option_list = ['admin', 'anomaly', 'auth',
'cpu_memory_usage', 'dhcp', 'dns',
'event', 'filter', 'filter_type',
'forward_traffic', 'gtp', 'ha',
'ipsec', 'ldb_monitor', 'local_traffic',
'multicast_traffic', 'netscan_discovery', 'netscan_vulnerability',
'pattern', 'ppp', 'radius',
'severity', 'sniffer_traffic', 'ssh',
'sslvpn_log_adm', 'sslvpn_log_auth', 'sslvpn_log_session',
'system', 'vip_ssl', 'voip',
'wan_opt', 'wireless_activity']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
if isinstance(data, list):
for elem in data:
elem = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
def log_memory_filter(data, fos):
vdom = data['vdom']
log_memory_filter_data = data['log_memory_filter']
filtered_data = underscore_to_hyphen(filter_log_memory_filter_data(log_memory_filter_data))
return fos.set('log.memory',
'filter',
data=filtered_data,
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_log_memory(data, fos):
if data['log_memory_filter']:
resp = log_memory_filter(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
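
# Build the argument spec, connect over httpapi or the legacy fortiosapi client, and apply the change.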
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"log_memory_filter": {
"required": False, "type": "dict", "default": None,
"options": {
"admin": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"anomaly": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"auth": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"cpu_memory_usage": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dhcp": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dns": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"event": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"filter": {"required": False, "type": "str"},
"filter_type": {"required": False, "type": "str",
"choices": ["include", "exclude"]},
"forward_traffic": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"gtp": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ha": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ipsec": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ldb_monitor": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"local_traffic": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"multicast_traffic": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"netscan_discovery": {"required": False, "type": "str"},
"netscan_vulnerability": {"required": False, "type": "str"},
"pattern": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ppp": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"radius": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"severity": {"required": False, "type": "str",
"choices": ["emergency", "alert", "critical",
"error", "warning", "notification",
"information", "debug"]},
"sniffer_traffic": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ssh": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"sslvpn_log_adm": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"sslvpn_log_auth": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"sslvpn_log_session": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"system": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"vip_ssl": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"voip": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"wan_opt": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"wireless_activity": {"required": False, "type": "str",
"choices": ["enable", "disable"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_log_memory(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_log_memory(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 |