repo_name | path | copies | size | content | license
---|---|---|---|---|---|
marc-sensenich/ansible | lib/ansible/module_utils/network/ftd/common.py | 22 | 6027 | # Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import re
from ansible.module_utils._text import to_text
from ansible.module_utils.common.collections import is_string
INVALID_IDENTIFIER_SYMBOLS = r'[^a-zA-Z0-9_]'
IDENTITY_PROPERTIES = ['id', 'version', 'ruleId']
NON_COMPARABLE_PROPERTIES = IDENTITY_PROPERTIES + ['isSystemDefined', 'links']
class HTTPMethod:
GET = 'get'
POST = 'post'
PUT = 'put'
DELETE = 'delete'
class ResponseParams:
SUCCESS = 'success'
STATUS_CODE = 'status_code'
RESPONSE = 'response'
class FtdConfigurationError(Exception):
def __init__(self, msg, obj=None):
super(FtdConfigurationError, self).__init__(msg)
self.msg = msg
self.obj = obj
class FtdServerError(Exception):
def __init__(self, response, code):
super(FtdServerError, self).__init__(response)
self.response = response
self.code = code
class FtdUnexpectedResponse(Exception):
"""The exception to be raised in case of unexpected responses from 3d parties."""
pass
def construct_ansible_facts(response, params):
facts = dict()
if response:
response_body = response['items'] if 'items' in response else response
if params.get('register_as'):
facts[params['register_as']] = response_body
elif 'name' in response_body and 'type' in response_body:
object_name = re.sub(INVALID_IDENTIFIER_SYMBOLS, '_', response_body['name'].lower())
fact_name = '%s_%s' % (response_body['type'], object_name)
facts[fact_name] = response_body
return facts
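# Illustrative sketch (example data assumed, not part of the original module): with no
# 'register_as' parameter, the fact name is derived from the object's type and name:
# >>> construct_ansible_facts({'name': 'Any IPv4', 'type': 'NetworkObject'}, {})
# {'NetworkObject_any_ipv4': {'name': 'Any IPv4', 'type': 'NetworkObject'}}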
def copy_identity_properties(source_obj, dest_obj):
for property_name in IDENTITY_PROPERTIES:
if property_name in source_obj:
dest_obj[property_name] = source_obj[property_name]
return dest_obj
def is_object_ref(d):
"""
Checks if a dictionary is a reference object. The dictionary is considered to be a
reference object when it contains non-empty 'id' and 'type' fields.
:type d: dict
:return: True if passed dictionary is a reference object, otherwise False
"""
has_id = 'id' in d.keys() and d['id']
has_type = 'type' in d.keys() and d['type']
return has_id and has_type
def equal_object_refs(d1, d2):
"""
Checks whether two references point to the same object.
:type d1: dict
:type d2: dict
:return: True if passed references point to the same object, otherwise False
"""
have_equal_ids = d1['id'] == d2['id']
have_equal_types = d1['type'] == d2['type']
return have_equal_ids and have_equal_types
def equal_lists(l1, l2):
"""
    Checks whether two lists are equal. The order of elements in the lists is important.
:type l1: list
:type l2: list
:return: True if passed lists, their elements and order of elements are equal. Otherwise, returns False.
"""
if len(l1) != len(l2):
return False
for v1, v2 in zip(l1, l2):
if not equal_values(v1, v2):
return False
return True
def equal_dicts(d1, d2, compare_by_reference=True):
"""
Checks whether two dictionaries are equal. If `compare_by_reference` is set to True, dictionaries referencing
objects are compared using `equal_object_refs` method. Otherwise, every key and value is checked.
:type d1: dict
:type d2: dict
:param compare_by_reference: if True, dictionaries referencing objects are compared using `equal_object_refs` method
:return: True if passed dicts are equal. Otherwise, returns False.
"""
if compare_by_reference and is_object_ref(d1) and is_object_ref(d2):
return equal_object_refs(d1, d2)
if len(d1) != len(d2):
return False
for key, v1 in d1.items():
if key not in d2:
return False
v2 = d2[key]
if not equal_values(v1, v2):
return False
return True
def equal_values(v1, v2):
"""
Checks whether types and content of two values are the same. In case of complex objects, the method might be
called recursively.
:param v1: first value
:param v2: second value
:return: True if types and content of passed values are equal. Otherwise, returns False.
:rtype: bool
"""
# string-like values might have same text but different types, so checking them separately
if is_string(v1) and is_string(v2):
return to_text(v1) == to_text(v2)
if type(v1) != type(v2):
return False
value_type = type(v1)
if value_type == list:
return equal_lists(v1, v2)
elif value_type == dict:
return equal_dicts(v1, v2)
else:
return v1 == v2
def equal_objects(d1, d2):
"""
Checks whether two objects are equal. Ignores special object properties (e.g. 'id', 'version') and
    properties with None and empty values. In case a property contains a reference to another object,
only object identities (ids and types) are checked.
:type d1: dict
:type d2: dict
:return: True if passed objects and their properties are equal. Otherwise, returns False.
"""
d1 = dict((k, d1[k]) for k in d1.keys() if k not in NON_COMPARABLE_PROPERTIES and d1[k])
d2 = dict((k, d2[k]) for k in d2.keys() if k not in NON_COMPARABLE_PROPERTIES and d2[k])
return equal_dicts(d1, d2, compare_by_reference=False)
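# Illustrative example (values assumed for demonstration): identity properties and empty
# values are ignored, so these two representations of the same object compare as equal:
# >>> equal_objects({'id': '1', 'version': 'a', 'name': 'HTTP', 'description': ''},
# ...               {'id': '2', 'version': 'b', 'name': 'HTTP'})
# True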
| gpl-3.0 |
wisechengyi/pants | src/python/pants/util/collections.py | 1 | 3201 | # Copyright 2017 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import collections
import collections.abc
from typing import Any, Callable, DefaultDict, Iterable, List, MutableMapping, Type, TypeVar, Union
_K = TypeVar("_K")
_V = TypeVar("_V")
def factory_dict(value_factory: Callable[[_K], _V], *args, **kwargs) -> DefaultDict:
"""A dict whose values are computed by `value_factory` when a `__getitem__` key is missing.
    Note that values retrieved by any other method will not be lazily computed; e.g. via `get`.
    :param value_factory:
    :param *args: Any positional args to pass through to `dict`.
    :param **kwargs: Any kwargs to pass through to `dict`.
"""
class FactoryDict(collections.defaultdict):
@staticmethod
def __never_called():
raise AssertionError(
"The default factory should never be called since we override " "__missing__."
)
def __init__(self):
super().__init__(self.__never_called, *args, **kwargs)
def __missing__(self, key):
value = value_factory(key)
self[key] = value
return value
return FactoryDict()
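# Illustrative usage sketch (not part of the original module): values are computed and
# cached on the first missing-key lookup, but only via __getitem__:
# >>> squares = factory_dict(lambda k: k * k)
# >>> squares[3]
# 9
# >>> squares.get(4) is None   # `get` bypasses the factory
# True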
def recursively_update(d: MutableMapping, d2: MutableMapping) -> None:
"""dict.update but which merges child dicts (dict2 takes precedence where there's conflict)."""
for k, v in d2.items():
if k in d:
if isinstance(v, dict):
recursively_update(d[k], v)
continue
d[k] = v
_T = TypeVar("_T")
def assert_single_element(iterable: Iterable[_T]) -> _T:
"""Get the single element of `iterable`, or raise an error.
:raise: :class:`StopIteration` if there is no element.
:raise: :class:`ValueError` if there is more than one element.
"""
it = iter(iterable)
first_item = next(it)
try:
next(it)
except StopIteration:
return first_item
raise ValueError(f"iterable {iterable!r} has more than one element.")
def ensure_list(val: Union[Any, Iterable[Any]], *, expected_type: Type[_T]) -> List[_T]:
"""Given either a single value or an iterable of values, always return a list.
This performs runtime type checking to ensure that every element of the list is the expected
type.
"""
if isinstance(val, expected_type):
return [val]
if not isinstance(val, collections.abc.Iterable):
raise ValueError(
f"The value {val} (type {type(val)}) did not have the expected type {expected_type} "
"nor was it an iterable."
)
result: List[_T] = []
for i, x in enumerate(val):
if not isinstance(x, expected_type):
raise ValueError(
f"Not all elements of the iterable have type {expected_type}. Encountered the "
f"element {x} of type {type(x)} at index {i}."
)
result.append(x)
return result
def ensure_str_list(val: Union[str, Iterable[str]]) -> List[str]:
"""Given either a single string or an iterable of strings, always return a list."""
return ensure_list(val, expected_type=str)
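# Illustrative usage (inputs assumed): both call forms normalize to a plain list, and a
# wrongly typed element raises ValueError:
# >>> ensure_str_list("foo")
# ['foo']
# >>> ensure_str_list(("foo", "bar"))
# ['foo', 'bar']
# >>> ensure_list([1, "2"], expected_type=int)   # raises ValueError at index 1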
| apache-2.0 |
imsut/commons | src/python/twitter/common/http/mirror_file.py | 2 | 2677 | import errno
import httplib
import os
import socket
import time
class MirrorFile(object):
def __init__(self, http_host, http_path, local_file, https=False):
"""
    Given the file at 'http_path' on 'http_host', mirror it to 'local_file', providing operations
to check that it's up to date.
"""
self._http_path = http_path
self._http_host = http_host
self._local_filename = local_file
self._connection_class = httplib.HTTPSConnection if https else httplib.HTTPConnection
self._local_mtime = None
self._web_mtime = None
self._exists = os.path.exists(local_file)
def _get_local_timestamp(self):
try:
stat = os.stat(self._local_filename)
return stat.st_mtime
except OSError as e:
if e.errno == errno.ENOENT:
self._local_mtime = None
else:
# File is inaccessible.
raise
return None
def _get_web_timestamp(self):
    # TODO(wickman) Wrap this in an exponential backoff.
conn = self._connection_class(self._http_host)
try:
conn.request('HEAD', self._http_path)
except (httplib.CannotSendRequest, socket.error):
return None
try:
res = conn.getresponse()
except (httplib.ResponseNotReady, httplib.BadStatusLine):
return None
if res is not None:
last_modified = res.getheader('last-modified')
if last_modified is not None:
try:
last_modified = time.strptime(last_modified, '%a, %d %b %Y %H:%M:%S %Z')
except ValueError:
return None
return int(time.mktime(last_modified))
return None
def filename(self):
if not self._exists:
ioe = IOError('%s does not exist' % self._local_filename)
ioe.errno = errno.ENOENT
raise ioe
return self._local_filename
def refresh(self):
"""
Refresh the local file if necessary. Returns truthy if the underlying file changed.
"""
self._local_mtime = self._get_local_timestamp()
self._web_mtime = self._get_web_timestamp()
if self._web_mtime is None:
return None
else:
if self._web_mtime != self._local_mtime:
return self._fetch()
def _fetch(self):
conn = self._connection_class(self._http_host)
try:
conn.request('GET', self._http_path)
except (httplib.CannotSendRequest, socket.error):
return None
try:
res = conn.getresponse()
except (httplib.ResponseNotReady, httplib.BadStatusLine):
return None
if res is not None:
with open(self._local_filename, 'w') as fp:
fp.write(res.read())
os.utime(self._local_filename, (self._web_mtime, self._web_mtime))
self._exists = True
return True
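  # Illustrative usage sketch (host and paths are hypothetical, not from the original file):
  # mirror = MirrorFile('example.com', '/packages/index.txt', '/tmp/index.txt')
  # if mirror.refresh():          # truthy when the remote copy was newer and was fetched
  #     print(mirror.filename())  # local path of the refreshed mirror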
| apache-2.0 |
uclouvain/osis | base/tests/views/learning_units/external/test_update.py | 1 | 4921 | ############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
############################################################################
from django.contrib.messages import get_messages, SUCCESS
from django.test import TestCase
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from waffle.testutils import override_flag
from base.models.enums.entity_type import FACULTY
from base.models.enums.learning_container_year_types import EXTERNAL
from base.models.enums.organization_type import MAIN
from base.tests.factories.academic_calendar import generate_learning_unit_edition_calendars
from base.tests.factories.academic_year import create_current_academic_year
from base.tests.factories.entity import EntityWithVersionFactory
from base.tests.factories.external_learning_unit_year import ExternalLearningUnitYearFactory
from base.tests.factories.learning_unit_year import LearningUnitYearFullFactory
from base.tests.factories.person import PersonFactory
from base.tests.forms.test_external_learning_unit import get_valid_external_learning_unit_form_data
from base.views.learning_units.update import update_learning_unit
from learning_unit.tests.factories.central_manager import CentralManagerFactory
@override_flag('learning_unit_update', active=True)
class TestUpdateExternalLearningUnitView(TestCase):
@classmethod
def setUpTestData(cls):
cls.entity = EntityWithVersionFactory(organization__type=MAIN, version__entity_type=FACULTY)
cls.manager = CentralManagerFactory(entity=cls.entity, with_child=True)
cls.person = cls.manager.person
cls.academic_year = create_current_academic_year()
generate_learning_unit_edition_calendars([cls.academic_year])
cls.luy = LearningUnitYearFullFactory(
academic_year=cls.academic_year,
internship_subtype=None,
acronym="EFAC1000",
learning_container_year__container_type=EXTERNAL,
learning_container_year__requirement_entity=cls.entity,
learning_container_year__allocation_entity=cls.entity,
)
cls.data = get_valid_external_learning_unit_form_data(cls.academic_year, cls.luy, cls.entity)
cls.url = reverse(update_learning_unit, args=[cls.luy.pk])
def setUp(self):
self.external = ExternalLearningUnitYearFactory(learning_unit_year=self.luy)
self.client.force_login(self.person.user)
def test_update_get(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
def test_update_get_permission_denied(self):
self.client.force_login(PersonFactory().user)
response = self.client.get(self.url)
self.assertEqual(response.status_code, 403)
def test_update_post(self):
response = self.client.post(self.url, data=self.data)
self.assertEqual(response.status_code, 302)
messages = [m.level for m in get_messages(response.wsgi_request)]
self.assertEqual(messages, [SUCCESS])
def test_update_message_with_report(self):
self.data['postponement'] = "1"
response = self.client.post(self.url, data=self.data)
self.assertEqual(response.status_code, 302)
messages = [m.message for m in get_messages(response.wsgi_request)]
self.assertEqual(messages[0], _("The learning unit has been updated (with report)."))
def test_update_message_without_report(self):
self.data['postponement'] = "0"
response = self.client.post(self.url, data=self.data)
self.assertEqual(response.status_code, 302)
messages = [m.message for m in get_messages(response.wsgi_request)]
self.assertEqual(messages[0], _("The learning unit has been updated (without report)."))
| agpl-3.0 |
bastik/youtube-dl | youtube_dl/extractor/eagleplatform.py | 65 | 3468 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
int_or_none,
)
class EaglePlatformIE(InfoExtractor):
_VALID_URL = r'''(?x)
(?:
eagleplatform:(?P<custom_host>[^/]+):|
https?://(?P<host>.+?\.media\.eagleplatform\.com)/index/player\?.*\brecord_id=
)
(?P<id>\d+)
'''
_TESTS = [{
# http://lenta.ru/news/2015/03/06/navalny/
'url': 'http://lentaru.media.eagleplatform.com/index/player?player=new&record_id=227304&player_template_id=5201',
'md5': '0b7994faa2bd5c0f69a3db6db28d078d',
'info_dict': {
'id': '227304',
'ext': 'mp4',
'title': 'Навальный вышел на свободу',
'description': 'md5:d97861ac9ae77377f3f20eaf9d04b4f5',
'thumbnail': 're:^https?://.*\.jpg$',
'duration': 87,
'view_count': int,
'age_limit': 0,
},
}, {
# http://muz-tv.ru/play/7129/
# http://media.clipyou.ru/index/player?record_id=12820&width=730&height=415&autoplay=true
'url': 'eagleplatform:media.clipyou.ru:12820',
'md5': '6c2ebeab03b739597ce8d86339d5a905',
'info_dict': {
'id': '12820',
'ext': 'mp4',
'title': "'O Sole Mio",
'thumbnail': 're:^https?://.*\.jpg$',
'duration': 216,
'view_count': int,
},
'skip': 'Georestricted',
}]
def _handle_error(self, response):
status = int_or_none(response.get('status', 200))
if status != 200:
raise ExtractorError(' '.join(response['errors']), expected=True)
def _download_json(self, url_or_request, video_id, note='Downloading JSON metadata'):
response = super(EaglePlatformIE, self)._download_json(url_or_request, video_id, note)
self._handle_error(response)
return response
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
host, video_id = mobj.group('custom_host') or mobj.group('host'), mobj.group('id')
player_data = self._download_json(
'http://%s/api/player_data?id=%s' % (host, video_id), video_id)
media = player_data['data']['playlist']['viewports'][0]['medialist'][0]
title = media['title']
description = media.get('description')
thumbnail = media.get('snapshot')
duration = int_or_none(media.get('duration'))
view_count = int_or_none(media.get('views'))
age_restriction = media.get('age_restriction')
age_limit = None
if age_restriction:
age_limit = 0 if age_restriction == 'allow_all' else 18
m3u8_data = self._download_json(
media['sources']['secure_m3u8']['auto'],
video_id, 'Downloading m3u8 JSON')
formats = self._extract_m3u8_formats(
m3u8_data['data'][0], video_id,
'mp4', entry_protocol='m3u8_native')
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'duration': duration,
'view_count': view_count,
'age_limit': age_limit,
'formats': formats,
}
| unlicense |
abaldwin1/thumbor | tests/loaders/test_https_loader.py | 2 | 7454 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com [email protected]
from os.path import abspath, join, dirname
from preggy import expect
import mock
# from tornado.concurrent import Future
import tornado.web
from tests.base import PythonTestCase, TestCase
from tornado.concurrent import Future
import thumbor.loaders.https_loader as loader
from thumbor.context import Context
from thumbor.config import Config
from thumbor.loaders import LoaderResult
def fixture_for(filename):
return abspath(join(dirname(__file__), 'fixtures', filename))
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write('Hello')
class EchoUserAgentHandler(tornado.web.RequestHandler):
def get(self):
self.write(self.request.headers['User-Agent'])
class HandlerMock(object):
def __init__(self, headers):
self.request = RequestMock(headers)
class RequestMock(object):
def __init__(self, headers):
self.headers = headers
class ResponseMock:
def __init__(self, error=None, content_type=None, body=None, code=None):
self.error = error
self.code = code
self.time_info = None
self.headers = {
'Content-Type': 'image/jpeg'
}
if content_type:
self.headers['Content-Type'] = content_type
self.body = body
class ReturnContentTestCase(PythonTestCase):
def test_return_none_on_error(self):
response_mock = ResponseMock(error='Error', code=599)
callback_mock = mock.Mock()
ctx = Context(None, None, None)
loader.return_contents(response_mock, 'some-url', callback_mock, ctx)
result = callback_mock.call_args[0][0]
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_be_null()
expect(result.successful).to_be_false()
def test_return_body_if_valid(self):
response_mock = ResponseMock(body='body', code=200)
callback_mock = mock.Mock()
ctx = Context(None, None, None)
loader.return_contents(response_mock, 'some-url', callback_mock, ctx)
result = callback_mock.call_args[0][0]
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_equal('body')
def test_return_upstream_error_on_body_none(self):
response_mock = ResponseMock(body=None, code=200)
callback_mock = mock.Mock()
ctx = Context(None, None, None)
loader.return_contents(response_mock, 'some-url', callback_mock, ctx)
result = callback_mock.call_args[0][0]
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_be_null()
expect(result.successful).to_be_false()
expect(result.error).to_equal(LoaderResult.ERROR_UPSTREAM)
def test_return_upstream_error_on_body_empty(self):
response_mock = ResponseMock(body='', code=200)
callback_mock = mock.Mock()
ctx = Context(None, None, None)
loader.return_contents(response_mock, 'some-url', callback_mock, ctx)
result = callback_mock.call_args[0][0]
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_be_null()
expect(result.successful).to_be_false()
expect(result.error).to_equal(LoaderResult.ERROR_UPSTREAM)
class ValidateUrlTestCase(PythonTestCase):
def test_with_allowed_sources(self):
config = Config()
config.ALLOWED_SOURCES = ['s.glbimg.com']
ctx = Context(None, config, None)
expect(
loader.validate(
ctx,
'http://www.google.com/logo.jpg'
)
).to_be_false()
expect(
loader.validate(
ctx,
'http://s2.glbimg.com/logo.jpg'
)
).to_be_false()
expect(
loader.validate(
ctx,
'/glob=:sfoir%20%20%3Co-pmb%20%20%20%20_%20%20%20%200%20%20g.-%3E%3Ca%20hplass='
)
).to_be_false()
expect(
loader.validate(ctx, 'http://s.glbimg.com/logo.jpg')).to_be_true()
def test_without_allowed_sources(self):
config = Config()
config.ALLOWED_SOURCES = []
ctx = Context(None, config, None)
is_valid = loader.validate(ctx, 'http://www.google.com/logo.jpg')
expect(is_valid).to_be_true()
class NormalizeUrlTestCase(PythonTestCase):
def test_should_normalize_url(self):
expect(loader._normalize_url('http://some.url')).to_equal('http://some.url')
expect(loader._normalize_url('some.url')).to_equal('https://some.url')
def test_should_normalize_quoted_url(self):
url = 'https%3A//www.google.ca/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png'
expected = 'https://www.google.ca/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png'
result = loader._normalize_url(url)
expect(result).to_equal(expected)
class HttpsLoaderTestCase(TestCase):
def get_app(self):
application = tornado.web.Application([
(r"/", MainHandler),
])
return application
def test_load_with_callback(self):
url = self.get_url('/')
config = Config()
ctx = Context(None, config, None)
loader.load(ctx, url, self.stop)
result = self.wait()
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_equal('Hello')
expect(result.successful).to_be_true()
def test_load_with_curl(self):
url = self.get_url('/')
config = Config()
config.HTTP_LOADER_CURL_ASYNC_HTTP_CLIENT = True
ctx = Context(None, config, None)
loader.load(ctx, url, self.stop)
result = self.wait()
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_equal('Hello')
expect(result.successful).to_be_true()
def test_should_return_a_future(self):
url = self.get_url('/')
config = Config()
ctx = Context(None, config, None)
future = loader.load(ctx, url)
expect(isinstance(future, Future)).to_be_true()
class HttpLoaderWithUserAgentForwardingTestCase(TestCase):
def get_app(self):
application = tornado.web.Application([
(r"/", EchoUserAgentHandler),
])
return application
def test_load_with_user_agent(self):
url = self.get_url('/')
config = Config()
config.HTTP_LOADER_FORWARD_USER_AGENT = True
ctx = Context(None, config, None, HandlerMock({"User-Agent": "test-user-agent"}))
loader.load(ctx, url, self.stop)
result = self.wait()
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_equal('test-user-agent')
def test_load_with_default_user_agent(self):
url = self.get_url('/')
config = Config()
config.HTTP_LOADER_FORWARD_USER_AGENT = True
config.HTTP_LOADER_DEFAULT_USER_AGENT = "DEFAULT_USER_AGENT"
ctx = Context(None, config, None, HandlerMock({}))
loader.load(ctx, url, self.stop)
result = self.wait()
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_equal('DEFAULT_USER_AGENT')
| mit |
payeldillip/django | django/contrib/gis/sitemaps/views.py | 341 | 2421 | from __future__ import unicode_literals
from django.apps import apps
from django.contrib.gis.db.models.fields import GeometryField
from django.contrib.gis.db.models.functions import AsKML, Transform
from django.contrib.gis.shortcuts import render_to_kml, render_to_kmz
from django.core.exceptions import FieldDoesNotExist
from django.db import DEFAULT_DB_ALIAS, connections
from django.http import Http404
def kml(request, label, model, field_name=None, compress=False, using=DEFAULT_DB_ALIAS):
"""
This view generates KML for the given app label, model, and field name.
The model's default manager must be GeoManager, and the field name
must be that of a geographic field.
"""
placemarks = []
try:
klass = apps.get_model(label, model)
except LookupError:
raise Http404('You must supply a valid app label and module name. Got "%s.%s"' % (label, model))
if field_name:
try:
field = klass._meta.get_field(field_name)
if not isinstance(field, GeometryField):
raise FieldDoesNotExist
except FieldDoesNotExist:
raise Http404('Invalid geometry field.')
connection = connections[using]
if connection.features.has_AsKML_function:
# Database will take care of transformation.
placemarks = klass._default_manager.using(using).annotate(kml=AsKML(field_name))
else:
# If the database offers no KML method, we use the `kml`
# attribute of the lazy geometry instead.
placemarks = []
if connection.features.has_Transform_function:
qs = klass._default_manager.using(using).annotate(
**{'%s_4326' % field_name: Transform(field_name, 4326)})
field_name += '_4326'
else:
qs = klass._default_manager.using(using).all()
for mod in qs:
mod.kml = getattr(mod, field_name).kml
placemarks.append(mod)
# Getting the render function and rendering to the correct.
if compress:
render = render_to_kmz
else:
render = render_to_kml
return render('gis/kml/placemarks.kml', {'places': placemarks})
def kmz(request, label, model, field_name=None, using=DEFAULT_DB_ALIAS):
"""
This view returns KMZ for the given app label, model, and field name.
"""
return kml(request, label, model, field_name, compress=True, using=using)
| bsd-3-clause |
trezor/micropython | ports/nrf/examples/ssd1306_mod.py | 3 | 1591 | # NOTE: Modified version to align with implemented I2C API in nrf port.
#
# Examples usage of SSD1306_SPI on pca10040
#
# from machine import Pin, SPI
# from ssd1306 import SSD1306_SPI
# spi = SPI(0, baudrate=40000000)
# dc = Pin.board.PA11
# res = Pin.board.PA12
# cs = Pin.board.PA13
# disp = SSD1306_SPI(128, 64, spi, dc, res, cs)
#
#
# Example usage of SSD1306_I2C on pca10040
#
# from machine import Pin, I2C
# from ssd1306_mod import SSD1306_I2C_Mod
# i2c = I2C(0, Pin.board.PA3, Pin.board.PA4)
# disp = SSD1306_I2C_Mod(128, 64, i2c)
from ssd1306 import SSD1306_I2C
SET_COL_ADDR = const(0x21)
SET_PAGE_ADDR = const(0x22)
class SSD1306_I2C_Mod(SSD1306_I2C):
def show(self):
x0 = 0
x1 = self.width - 1
if self.width == 64:
# displays with width of 64 pixels are shifted by 32
x0 += 32
x1 += 32
self.write_cmd(SET_COL_ADDR)
self.write_cmd(x0)
self.write_cmd(x1)
self.write_cmd(SET_PAGE_ADDR)
self.write_cmd(0)
self.write_cmd(self.pages - 1)
chunk_size = 254 # 255, excluding opcode.
num_of_chunks = len(self.buffer) // chunk_size
leftover = len(self.buffer) - (num_of_chunks * chunk_size)
for i in range(0, num_of_chunks):
self.write_data(self.buffer[chunk_size*i:chunk_size*(i+1)])
if (leftover > 0):
self.write_data(self.buffer[chunk_size * num_of_chunks:])
def write_data(self, buf):
buffer = bytearray([0x40]) + buf # Co=0, D/C#=1
self.i2c.writeto(self.addr, buffer)
| mit |
mamachanko/lymph | lymph/core/connection.py | 8 | 4696 | # -*- coding: utf-8 -*-
from __future__ import division, unicode_literals
import gevent
import math
import os
import time
import logging
from lymph.utils import SampleWindow
from lymph.exceptions import RpcError
logger = logging.getLogger(__name__)
UNKNOWN = 'unknown'
RESPONSIVE = 'responsive'
UNRESPONSIVE = 'unresponsive'
CLOSED = 'closed'
IDLE = 'idle'
class Connection(object):
def __init__(self, server, endpoint, heartbeat_interval=1, timeout=3, idle_timeout=10, unresponsive_disconnect=30, idle_disconnect=60):
assert heartbeat_interval < timeout < idle_timeout
self.server = server
self.endpoint = endpoint
self.timeout = timeout
self.heartbeat_interval = heartbeat_interval
self.idle_timeout = idle_timeout
self.unresponsive_disconnect = unresponsive_disconnect
self.idle_disconnect = idle_disconnect
now = time.monotonic()
self.last_seen = 0
self.idle_since = 0
self.last_message = now
self.created_at = now
self.heartbeat_samples = SampleWindow(100, factor=1000) # milliseconds
self.explicit_heartbeat_count = 0
self.status = UNKNOWN
self.received_message_count = 0
self.sent_message_count = 0
self.heartbeat_loop_greenlet = self.server.spawn(self.heartbeat_loop)
self.live_check_loop_greenlet = self.server.spawn(self.live_check_loop)
self.pid = os.getpid()
def __str__(self):
return "connection to=%s last_seen=%s" % (self.endpoint, self._dt())
def _dt(self):
return time.monotonic() - self.last_seen
@property
def phi(self):
p = self.heartbeat_samples.p(self._dt())
if p == 0:
return float('inf')
return -math.log10(p)
def set_status(self, status):
self.status = status
def heartbeat_loop(self):
while True:
start = time.monotonic()
channel = self.server.ping(self.endpoint)
error = False
try:
channel.get(timeout=self.heartbeat_interval)
except RpcError as e:
                logger.debug('heartbeat error on %s: %r', self, e)
error = True
took = time.monotonic() - start
if not error:
self.heartbeat_samples.add(took)
self.explicit_heartbeat_count += 1
gevent.sleep(max(0, self.heartbeat_interval - took))
def live_check_loop(self):
while True:
self.update_status()
self.log_stats()
gevent.sleep(1)
def update_status(self):
if self.last_seen:
now = time.monotonic()
if now - self.last_seen >= self.timeout:
self.set_status(UNRESPONSIVE)
elif now - self.last_message >= self.idle_timeout:
self.set_status(IDLE)
self.idle_since = now
else:
self.set_status(RESPONSIVE)
def log_stats(self):
roundtrip_stats = 'window (mean rtt={mean:.1f} ms; stddev rtt={stddev:.1f})'.format(**self.heartbeat_samples.stats)
roundtrip_total_stats = 'total (mean rtt={mean:.1f} ms; stddev rtt={stddev:.1f})'.format(**self.heartbeat_samples.total.stats)
logger.debug("pid=%s; endpoint=%s; %s; %s; phi=%.3f; ping/s=%.2f; status=%s" % (
self.pid,
self.endpoint,
roundtrip_stats,
roundtrip_total_stats,
self.phi,
self.explicit_heartbeat_count / max(1, time.monotonic() - self.created_at),
self.status,
))
def close(self):
if self.status == CLOSED:
return
self.status = CLOSED
self.heartbeat_loop_greenlet.kill()
self.live_check_loop_greenlet.kill()
self.server.disconnect(self.endpoint)
def on_recv(self, msg):
now = time.monotonic()
self.last_seen = now
if not msg.is_idle_chatter():
self.last_message = now
self.received_message_count += 1
def on_send(self, msg):
if not msg.is_idle_chatter():
self.last_message = time.monotonic()
self.sent_message_count += 1
def is_alive(self):
return self.status in (RESPONSIVE, IDLE, UNKNOWN)
def stats(self):
# FIXME: rtt and phi should be recorded as summary/histogram for all connections
return {
'endpoint': self.endpoint,
'rtt': self.heartbeat_samples.stats,
'phi': self.phi,
'status': self.status,
'sent': self.sent_message_count,
'received': self.received_message_count,
}
| apache-2.0 |
GoogleCloudPlatform/training-data-analyst | courses/data-engineering/kubeflow-examples/mnist/testing/conftest.py | 2 | 3215 | import os
import pytest
def pytest_addoption(parser):
parser.addoption(
"--tfjob_name", help="Name for the TFjob.",
type=str, default="mnist-test-" + os.getenv('BUILD_ID'))
parser.addoption(
"--namespace", help=("The namespace to run in. This should correspond to"
"a namespace associated with a Kubeflow namespace."),
type=str, default="kubeflow-kubeflow-testing")
parser.addoption(
"--repos", help="The repos to checkout; leave blank to use defaults",
type=str, default="")
parser.addoption(
"--trainer_image", help="TFJob training image",
type=str, default="gcr.io/kubeflow-examples/mnist/model:build-" + os.getenv('BUILD_ID'))
parser.addoption(
"--train_steps", help="train steps for mnist testing",
type=str, default="200")
parser.addoption(
"--batch_size", help="batch size for mnist trainning",
type=str, default="100")
parser.addoption(
"--learning_rate", help="mnist learnning rate",
type=str, default="0.01")
parser.addoption(
"--num_ps", help="The number of PS",
type=str, default="1")
parser.addoption(
"--num_workers", help="The number of Worker",
type=str, default="2")
parser.addoption(
"--model_dir", help="Path for model saving",
type=str, default="gs://kubeflow-ci-deployment_ci-temp/mnist/models/" + os.getenv('BUILD_ID'))
parser.addoption(
"--export_dir", help="Path for model exporting",
type=str, default="gs://kubeflow-ci-deployment_ci-temp/mnist/models/" + os.getenv('BUILD_ID'))
parser.addoption(
"--deploy_name", help="Name for the service deployment",
type=str, default="mnist-test-" + os.getenv('BUILD_ID'))
parser.addoption(
"--master", action="store", default="", help="IP address of GKE master")
parser.addoption(
"--service", action="store", default="mnist-test-" + os.getenv('BUILD_ID'),
help="The name of the mnist K8s service")
@pytest.fixture
def master(request):
return request.config.getoption("--master")
@pytest.fixture
def namespace(request):
return request.config.getoption("--namespace")
@pytest.fixture
def service(request):
return request.config.getoption("--service")
@pytest.fixture
def tfjob_name(request):
return request.config.getoption("--tfjob_name")
@pytest.fixture
def repos(request):
return request.config.getoption("--repos")
@pytest.fixture
def trainer_image(request):
return request.config.getoption("--trainer_image")
@pytest.fixture
def train_steps(request):
return request.config.getoption("--train_steps")
@pytest.fixture
def batch_size(request):
return request.config.getoption("--batch_size")
@pytest.fixture
def learning_rate(request):
return request.config.getoption("--learning_rate")
@pytest.fixture
def num_ps(request):
return request.config.getoption("--num_ps")
@pytest.fixture
def num_workers(request):
return request.config.getoption("--num_workers")
@pytest.fixture
def model_dir(request):
return request.config.getoption("--model_dir")
@pytest.fixture
def export_dir(request):
return request.config.getoption("--export_dir")
@pytest.fixture
def deploy_name(request):
return request.config.getoption("--deploy_name")
| apache-2.0 |
devdelay/home-assistant | homeassistant/util/__init__.py | 1 | 13534 | """Helper methods for various modules."""
from collections.abc import MutableSet
from itertools import chain
import threading
import queue
from datetime import datetime
import re
import enum
import socket
import random
import string
from functools import wraps
from types import MappingProxyType
from typing import Any, Sequence
from .dt import as_local, utcnow
RE_SANITIZE_FILENAME = re.compile(r'(~|\.\.|/|\\)')
RE_SANITIZE_PATH = re.compile(r'(~|\.(\.)+)')
RE_SLUGIFY = re.compile(r'[^a-z0-9_]+')
def sanitize_filename(filename):
r"""Sanitize a filename by removing .. / and \\."""
return RE_SANITIZE_FILENAME.sub("", filename)
def sanitize_path(path):
"""Sanitize a path by removing ~ and .."""
return RE_SANITIZE_PATH.sub("", path)
def slugify(text: str) -> str:
"""Slugify a given text."""
text = text.lower().replace(" ", "_")
return RE_SLUGIFY.sub("", text)
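# Illustrative examples (inputs assumed): spaces become underscores and any remaining
# characters outside [a-z0-9_] are dropped:
# >>> slugify("Home Assistant")
# 'home_assistant'
# >>> slugify("Living Room!")
# 'living_room'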
def repr_helper(inp: Any) -> str:
"""Help creating a more readable string representation of objects."""
if isinstance(inp, (dict, MappingProxyType)):
return ", ".join(
repr_helper(key)+"="+repr_helper(item) for key, item
in inp.items())
elif isinstance(inp, datetime):
return as_local(inp).isoformat()
else:
return str(inp)
def convert(value, to_type, default=None):
"""Convert value to to_type, returns default if fails."""
try:
return default if value is None else to_type(value)
except (ValueError, TypeError):
# If value could not be converted
return default
def ensure_unique_string(preferred_string: str,
current_strings: Sequence[str]) -> str:
"""Return a string that is not present in current_strings.
    If the preferred string exists, _2, _3, ... is appended until a unique string is found.
"""
test_string = preferred_string
current_strings_set = set(current_strings)
tries = 1
while test_string in current_strings_set:
tries += 1
test_string = "{}_{}".format(preferred_string, tries)
return test_string
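# Illustrative example (inputs assumed): suffixes are tried until a free name is found:
# >>> ensure_unique_string("beer", ["beer", "beer_2"])
# 'beer_3'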
# Taken from: http://stackoverflow.com/a/11735897
def get_local_ip():
"""Try to determine the local IP address of the machine."""
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Use Google Public DNS server to determine own IP
sock.connect(('8.8.8.8', 80))
return sock.getsockname()[0]
except socket.error:
return socket.gethostbyname(socket.gethostname())
finally:
sock.close()
# Taken from http://stackoverflow.com/a/23728630
def get_random_string(length=10):
"""Return a random string with letters and digits."""
generator = random.SystemRandom()
source_chars = string.ascii_letters + string.digits
return ''.join(generator.choice(source_chars) for _ in range(length))
class OrderedEnum(enum.Enum):
"""Taken from Python 3.4.0 docs."""
# pylint: disable=no-init, too-few-public-methods
def __ge__(self, other):
"""Return the greater than element."""
if self.__class__ is other.__class__:
return self.value >= other.value
return NotImplemented
def __gt__(self, other):
"""Return the greater element."""
if self.__class__ is other.__class__:
return self.value > other.value
return NotImplemented
def __le__(self, other):
"""Return the lower than element."""
if self.__class__ is other.__class__:
return self.value <= other.value
return NotImplemented
def __lt__(self, other):
"""Return the lower element."""
if self.__class__ is other.__class__:
return self.value < other.value
return NotImplemented
class OrderedSet(MutableSet):
"""Ordered set taken from http://code.activestate.com/recipes/576694/."""
def __init__(self, iterable=None):
"""Initialize the set."""
self.end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.map = {} # key --> [key, prev, next]
if iterable is not None:
self |= iterable
def __len__(self):
"""Return the length of the set."""
return len(self.map)
def __contains__(self, key):
"""Check if key is in set."""
return key in self.map
def add(self, key):
"""Add an element to the end of the set."""
if key not in self.map:
end = self.end
curr = end[1]
curr[2] = end[1] = self.map[key] = [key, curr, end]
def promote(self, key):
"""Promote element to beginning of the set, add if not there."""
if key in self.map:
self.discard(key)
begin = self.end[2]
curr = begin[1]
curr[2] = begin[1] = self.map[key] = [key, curr, begin]
def discard(self, key):
"""Discard an element from the set."""
if key in self.map:
key, prev_item, next_item = self.map.pop(key)
prev_item[2] = next_item
next_item[1] = prev_item
def __iter__(self):
"""Iteration of the set."""
end = self.end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
"""Reverse the ordering."""
end = self.end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def pop(self, last=True): # pylint: disable=arguments-differ
"""Pop element of the end of the set.
Set last=False to pop from the beginning.
"""
if not self:
raise KeyError('set is empty')
key = self.end[1][0] if last else self.end[2][0]
self.discard(key)
return key
def update(self, *args):
"""Add elements from args to the set."""
for item in chain(*args):
self.add(item)
def __repr__(self):
"""Return the representation."""
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self))
def __eq__(self, other):
"""Return the comparision."""
if isinstance(other, OrderedSet):
return len(self) == len(other) and list(self) == list(other)
return set(self) == set(other)
class Throttle(object):
"""A class for throttling the execution of tasks.
This method decorator adds a cooldown to a method to prevent it from being
    called more than once within the timedelta interval `min_time` after it
returned its result.
Calling a method a second time during the interval will return None.
Pass keyword argument `no_throttle=True` to the wrapped method to make
the call not throttled.
Decorator takes in an optional second timedelta interval to throttle the
'no_throttle' calls.
Adds a datetime attribute `last_call` to the method.
"""
# pylint: disable=too-few-public-methods
def __init__(self, min_time, limit_no_throttle=None):
"""Initialize the throttle."""
self.min_time = min_time
self.limit_no_throttle = limit_no_throttle
def __call__(self, method):
"""Caller for the throttle."""
if self.limit_no_throttle is not None:
method = Throttle(self.limit_no_throttle)(method)
# Different methods that can be passed in:
# - a function
# - an unbound function on a class
# - a method (bound function on a class)
# We want to be able to differentiate between function and unbound
# methods (which are considered functions).
        # All methods have the classname in their qualname separated by a '.'
# Functions have a '.' in their qualname if defined inline, but will
# be prefixed by '.<locals>.' so we strip that out.
is_func = (not hasattr(method, '__self__') and
'.' not in method.__qualname__.split('.<locals>.')[-1])
@wraps(method)
def wrapper(*args, **kwargs):
"""Wrapper that allows wrapped to be called only once per min_time.
If we cannot acquire the lock, it is running so return None.
"""
# pylint: disable=protected-access
if hasattr(method, '__self__'):
host = method.__self__
elif is_func:
host = wrapper
else:
host = args[0] if args else wrapper
if not hasattr(host, '_throttle'):
host._throttle = {}
if id(self) not in host._throttle:
host._throttle[id(self)] = [threading.Lock(), None]
throttle = host._throttle[id(self)]
if not throttle[0].acquire(False):
return None
# Check if method is never called or no_throttle is given
force = not throttle[1] or kwargs.pop('no_throttle', False)
try:
if force or utcnow() - throttle[1] > self.min_time:
result = method(*args, **kwargs)
throttle[1] = utcnow()
return result
else:
return None
finally:
throttle[0].release()
return wrapper
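# Illustrative usage sketch (the interval and the Sensor class are assumed, not from the
# original file): the first call runs, repeat calls inside the interval return None, and
# no_throttle=True bypasses the cooldown.
# from datetime import timedelta
#
# class Sensor:
#     @Throttle(timedelta(seconds=30))
#     def update(self):
#         return "fetched"
#
# s = Sensor()
# s.update()                   # 'fetched' - first call always runs
# s.update()                   # None - throttled for the next 30 seconds
# s.update(no_throttle=True)   # 'fetched' - bypasses the cooldown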
class ThreadPool(object):
"""A priority queue-based thread pool."""
# pylint: disable=too-many-instance-attributes
def __init__(self, job_handler, worker_count=0, busy_callback=None):
"""Initialize the pool.
job_handler: method to be called from worker thread to handle job
worker_count: number of threads to run that handle jobs
busy_callback: method to be called when queue gets too big.
Parameters: worker_count, list of current_jobs,
pending_jobs_count
"""
self._job_handler = job_handler
self._busy_callback = busy_callback
self.worker_count = 0
self.busy_warning_limit = 0
self._work_queue = queue.PriorityQueue()
self.current_jobs = []
self._lock = threading.RLock()
self._quit_task = object()
self.running = True
for _ in range(worker_count):
self.add_worker()
def add_worker(self):
"""Add worker to the thread pool and reset warning limit."""
with self._lock:
if not self.running:
raise RuntimeError("ThreadPool not running")
worker = threading.Thread(
target=self._worker,
name='ThreadPool Worker {}'.format(self.worker_count))
worker.daemon = True
worker.start()
self.worker_count += 1
self.busy_warning_limit = self.worker_count * 3
def remove_worker(self):
"""Remove worker from the thread pool and reset warning limit."""
with self._lock:
if not self.running:
raise RuntimeError("ThreadPool not running")
self._work_queue.put(PriorityQueueItem(0, self._quit_task))
self.worker_count -= 1
self.busy_warning_limit = self.worker_count * 3
def add_job(self, priority, job):
"""Add a job to the queue."""
with self._lock:
if not self.running:
raise RuntimeError("ThreadPool not running")
self._work_queue.put(PriorityQueueItem(priority, job))
# Check if our queue is getting too big.
if self._work_queue.qsize() > self.busy_warning_limit \
and self._busy_callback is not None:
# Increase limit we will issue next warning.
self.busy_warning_limit *= 2
self._busy_callback(
self.worker_count, self.current_jobs,
self._work_queue.qsize())
def block_till_done(self):
"""Block till current work is done."""
self._work_queue.join()
def stop(self):
"""Finish all the jobs and stops all the threads."""
self.block_till_done()
with self._lock:
if not self.running:
return
# Tell the workers to quit
for _ in range(self.worker_count):
self.remove_worker()
self.running = False
# Wait till all workers have quit
self.block_till_done()
def _worker(self):
"""Handle jobs for the thread pool."""
while True:
# Get new item from work_queue
job = self._work_queue.get().item
if job is self._quit_task:
self._work_queue.task_done()
return
# Add to current running jobs
job_log = (utcnow(), job)
self.current_jobs.append(job_log)
# Do the job
self._job_handler(job)
# Remove from current running job
self.current_jobs.remove(job_log)
# Tell work_queue the task is done
self._work_queue.task_done()
class PriorityQueueItem(object):
"""Holds a priority and a value. Used within PriorityQueue."""
# pylint: disable=too-few-public-methods
def __init__(self, priority, item):
"""Initialize the queue."""
self.priority = priority
self.item = item
def __lt__(self, other):
"""Return the ordering."""
return self.priority < other.priority
| mit |
bxlab/bx-python | lib/bx/align/epo.py | 1 | 11523 | """Classes and utilities for multiple alignments from the EPO pipeline"""
import logging
import os
import pickle as cPickle
import re
from collections import namedtuple
from ._epo import ( # noqa: F401
bed_union,
cummulative_intervals,
fastLoadChain,
rem_dash
)
log = logging.getLogger(__name__)
class Chain(namedtuple('Chain', 'score tName tSize tStrand tStart tEnd qName qSize qStrand qStart qEnd id')):
"""A Chain header as in http://genome.ucsc.edu/goldenPath/help/chain.html
chain coordinates are with respect to the strand, so for example tStart on the + strand is the
distance from the leftmost position; tStart on the - strand is the distance from the rightmost position."""
__slots__ = ()
def __str__(self):
return "chain {score} {tName} {tSize} {tStrand} {tStart} {tEnd} {qName} {qSize} {qStrand} {qStart} {qEnd} {id}".format(**self._asdict())
@classmethod
def _strfactory(cls, line):
"""factory class method for Chain
:param line: header of a chain (in .chain format)
"""
assert isinstance(line, str), "this is a factory from string"
line = line.rstrip().split()[1:] # the first component is the keyword "chain"
tup = [t[0](t[1]) for t in zip([int, str, int, str, int, int, str, int, str, int, int, str], line)]
return tuple.__new__(cls, tup)
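    # Illustrative example (header line taken from the UCSC chain format documentation):
    # >>> Chain._strfactory("chain 4900 chrY 58368225 + 25985403 25985638 chr5 151006098 - 43257292 43257528 1")
    # Chain(score=4900, tName='chrY', tSize=58368225, tStrand='+', ..., id='1')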
@classmethod
def _make_from_epo(cls, trg_comp, qr_comp, trg_chrom_sizes, qr_chrom_sizes):
"""crate a chain of collinear rings from the given components.
The target of the chain will always be on the forward strand.
This is done to avoid confusion when mapping psl files. So,
if trg_comp.strand=-, qr_comp.strand=- (resp. +) the
chain header will have tStrand=+, qStrand=+ (resp. -). No strand
        changes in the other cases.
:param trg_comp: target (i.e, the first) component
:type trg_comp: L{EPOitem}
:param qr_comp: query (i.e, the second) component
:type qr_comp: L{EPOitem}
:param trg_chrom_sizes: chromosome sizes of the target
:type trg_chrom_sizes: dictionary of the type (chrom) --> size
:param qr_chrom_sizes: chromosome sizes of the query
:type qr_chrom_sizes: dictionary of the type (chrom) --> size
:return: A L{Chain} instance"""
# size, target, query arrays
S, T, Q = [], [], []
# the target strand of the chain must be on the forward strand
trg_intervals = trg_comp.intervals(reverse=trg_comp.strand == '-')
qr_intervals = qr_comp.intervals(reverse=trg_comp.strand == '-')
if len(trg_intervals) == 0 or len(qr_intervals) == 0:
log.warning("deletion/insertion only intervals")
return None
A, B = rem_dash(trg_intervals, qr_intervals)
# correct for when cigar starts/ends with dashes (in number of bases)
tr_start_correction = max(B[0][0] - A[0][0], 0)
tr_end_correction = max(A[-1][1] - B[-1][1], 0)
qr_start_correction = max(A[0][0] - B[0][0], 0)
qr_end_correction = max(B[-1][1] - A[-1][1], 0)
a, b = A.pop(0), B.pop(0)
        # intervals are 0-based, half-open => lengths = coordinate difference
while A or B:
if a[1] < b[1]:
T.append(0)
Q.append(A[0][0] - a[1])
S.append(min(a[1], b[1]) - max(a[0], b[0]))
a = A.pop(0)
elif b[1] < a[1]:
Q.append(0)
T.append(B[0][0] - b[1])
S.append(min(a[1], b[1]) - max(a[0], b[0]))
b = B.pop(0)
elif A and B:
assert 1 > 2, "there are dash columns"
else:
break
S.append(min(a[1], b[1]) - max(a[0], b[0]))
assert len(T) == len(Q) == len(S) - 1, "(S, T, Q) = (%d, %d, %d)" % tuple(map(len, (S, T, Q)))
tSize = trg_chrom_sizes[trg_comp.chrom]
qSize = qr_chrom_sizes[qr_comp.chrom]
        # UCSC coordinates are 0-based, half-open and e! coordinates are 1-based, closed
# chain_start = epo_start - 1 and chain_end = epo_end
if qr_comp.strand == '+':
chain = Chain(
0, trg_comp.chrom, tSize, "+",
(trg_comp.start - 1) + tr_start_correction, trg_comp.end - tr_end_correction,
qr_comp.chrom, qSize, (qr_comp.strand == trg_comp.strand and '+' or '-'),
(qr_comp.start - 1) + qr_start_correction, qr_comp.end - qr_end_correction,
qr_comp.gabid)
else:
chain = Chain(
0, trg_comp.chrom, tSize, "+",
(trg_comp.start - 1) + tr_start_correction, trg_comp.end - tr_end_correction,
qr_comp.chrom, qSize, (qr_comp.strand == trg_comp.strand and '+' or '-'),
(qr_comp.start - 1) + qr_end_correction, qr_comp.end - qr_start_correction,
qr_comp.gabid)
# strand correction. in UCSC coordinates this is: size - coord
if chain.qStrand == '-':
chain = chain._replace(
qEnd=chain.qSize - chain.qStart,
qStart=chain.qSize - chain.qEnd)
assert chain.tEnd - chain.tStart == sum(S) + sum(T), "[%s] %d != %d" % (
str(chain), chain.tEnd - chain.tStart, sum(S) + sum(T))
assert chain.qEnd - chain.qStart == sum(S) + sum(Q), "[%s] %d != %d" % (
str(chain), chain.qEnd - chain.qStart, sum(S) + sum(Q))
return chain, S, T, Q
def slice(self, who):
"return the slice entry (in a bed6 format), AS IS in the chain header"
assert who in ('t', 'q'), "who should be 't' or 'q'"
if who == 't':
return (self.tName, self.tStart, self.tEnd, self.id, self.score, self.tStrand)
else:
return (self.qName, self.qStart, self.qEnd, self.id, self.score, self.qStrand)
def bedInterval(self, who):
"return a BED6 entry, thus DOES coordinate conversion for minus strands"
if who == 't':
st, en = self.tStart, self.tEnd
if self.tStrand == '-':
st, en = self.tSize-en, self.tSize-st
return (self.tName, st, en, self.id, self.score, self.tStrand)
else:
st, en = self.qStart, self.qEnd
if self.qStrand == '-':
st, en = self.qSize-en, self.qSize-st
assert en-st == self.qEnd - self.qStart
return (self.qName, st, en, self.id, self.score, self.qStrand)
@classmethod
def _parse_file(cls, path, pickle=False):
"""parse a .chain file into a list of the type [(L{Chain}, arr, arr, arr) ...]
        :param path: name of the file"""
fname = path
if fname.endswith(".gz"):
fname = path[:-3]
if fname.endswith('.pkl'):
# you asked for the pickled file. I'll give it to you
log.debug("loading pickled file %s ...", fname)
with open(fname, "rb") as f:
return cPickle.load(f)
elif os.path.isfile("%s.pkl" % fname):
# there is a cached version I can give to you
log.info("loading pickled file %s.pkl ...", fname)
if os.stat(path).st_mtime > os.stat("%s.pkl" % fname).st_mtime:
log.critical("*** pickled file %s.pkl is not up to date ***", fname)
try:
with open("%s.pkl" % fname, "rb") as f:
return cPickle.load(f)
except Exception:
log.warning("Loading pickled file %s.pkl failed", fname)
data = fastLoadChain(path, cls._strfactory)
if pickle and not os.path.isfile('%s.pkl' % fname):
log.info("pickling to %s.pkl", fname)
with open('%s.pkl' % fname, 'wb') as f:
cPickle.dump(data, f)
return data
class EPOitem(namedtuple('Epo_item', 'species gabid chrom start end strand cigar')):
"this format is how alignments are delivered from e!"
__slots__ = ()
cigar_pattern = re.compile(r"(\d*)([MD])")
def __repr__(self):
return str(self)
def __str__(self):
c = self.cigar[:5] + "..." + self.cigar[-5:]
return "(%s %s %s %d %d %s %s)" % tuple(self[:6] + (c,))
@classmethod
def _strfactory(cls, line):
"""factory method for an EPOitem
:param line: a line of input"""
cmp = line.rstrip().split()
chrom = cmp[2]
if not chrom.startswith("chr"):
chrom = "chr%s" % chrom
instance = tuple.__new__(
cls,
(cmp[0], cmp[1], chrom, int(cmp[3]), int(cmp[4]), {'1': '+', '-1': '-'}[cmp[5]], cmp[6]))
span = instance.end - instance.start + 1
m_num = sum((t[1] == "M" and [t[0]] or [0])[0] for t in instance.cigar_iter(False))
if span != m_num:
log.warning("[{gabid}] {species}.{chrom}:{start}-{end}.".format(**instance._asdict()) + "(span) %d != %d (matches)" % (span, m_num))
return None
return instance
@classmethod
def _parse_epo(cls, fname):
"""Load an entire file in the EPO format into a dictionary of the type {gab_id => [Epoitem, ...]}
:param fname: file name"""
data = {}
with open(fname) as fd:
for el in (cls._strfactory(_) for _ in fd):
if el:
data.setdefault(el.gabid, []).append(el)
log.info("parsed %d elements from %s", len(data), fname)
return data
def cigar_iter(self, reverse):
"""self.cigar => [(length, type) ... ] iterate the cigar
:param reverse: whether to iterate in the reverse direction (right-to-left)
:type reverse: boolean
:return a list of pairs of the type [(length, M/D) ..]
"""
l = 0
P = self.cigar_pattern
data = []
cigar = self.cigar
parsed_cigar = re.findall(P, cigar)
if reverse:
parsed_cigar = parsed_cigar[::-1]
for _l, t in parsed_cigar:
# 1M is encoded as M
l = (_l and int(_l) or 1) # int(_l) cannot be 0
data.append((l, t))
return data
def intervals(self, reverse, thr=0):
"""return a list of (0-based half-open) intervals representing the match regions of the cigar
for example 4MD4M2DM with reverse=False will produce [(0,4), (5,9), (11,12)]
4MD4M2DM with reverse=True will produce [(0,1), (3,7), (8,12)] (= 12 - previous interval)
:param reverse: whether to iterate in the reverse direction (right-to-left) (this is passed as is to self.cigar_iter)
:type reverse: boolean
:param thr: shift all intervals by this much
:type thr: integer
:return: list of pairs"""
d = [(thr, thr)]
dl = 0
for tup in self.cigar_iter(reverse):
if tup[1] == "D":
dl = tup[0]
else:
s = d[-1][1] + dl
d.append((s, s+tup[0]))
assert d[0] == (thr, thr)
# assert that nr. of Ms in the interval == sum of produced intervals
assert sum(t[0] for t in self.cigar_iter(False) if t[1] == "M") == sum(t[1]-t[0] for t in d)
d_sum = sum(t[1]-t[0] for t in d)
assert self.end - self.start + 1 == d_sum, "[ (%d, %d) = %d ] != %d" % (
self.start, self.end, self.end-self.start+1, d_sum)
return d[1:] # clip the (thr, thr) entry
| mit |
xutian/virt-test | virttest/qemu_monitor_unittest.py | 14 | 11219 | import unittest
import common
from qemu_monitor import Monitor
import qemu_monitor
class MockMonitor(qemu_monitor.Monitor):
""" Dummy class inherited from qemu_monitor.HumanMonitor """
def __init__(self): # pylint: disable=W0231
pass
def __del__(self):
pass
class InfoNumaTests(unittest.TestCase):
def testZeroNodes(self):
d = "0 nodes\n"
r = Monitor.parse_info_numa(d)
self.assertEquals(r, [])
def testTwoNodes(self):
d = "2 nodes\n" + \
"node 0 cpus: 0 2 4\n" + \
"node 0 size: 12 MB\n" + \
"node 1 cpus: 1 3 5\n" + \
"node 1 size: 34 MB\n"
r = Monitor.parse_info_numa(d)
self.assertEquals(r, [(12, set([0, 2, 4])),
(34, set([1, 3, 5]))])
class InfoBlocks(unittest.TestCase):
def testParseBlocks(self):
info_1_4 = """ide0-hd0: removable=0 io-status=ok file=c.qcow2 backing_file=b.qcow2 backing_file_depth=2 ro=0 drv=qcow2 encrypted=0 bps=0 bps_rd=0 bps_wr=0 iops=0 iops_rd=0 iops_wr=0
scsi0-hd0: removable=0 io-status=ok file=a.qcow ro=1 drv=raw encrypted=0 bps=0 bps_rd=0 bps_wr=0 iops=0 iops_rd=0 iops_wr=0
scsi0-hd1: removable=0 io-status=ok file=enc.qcow2 ro=0 drv=qcow2 encrypted=1 bps=0 bps_rd=0 bps_wr=0 iops=0 iops_rd=0 iops_wr=0
ide1-cd0: removable=1 locked=0 tray-open=0 io-status=ok [not inserted]
floppy0: removable=1 locked=0 tray-open=0 [not inserted]
sd0: removable=1 locked=0 tray-open=0 [not inserted]"""
info_1_5 = """ide0-hd0: c.qcow2 (qcow2)
Backing file: b.qcow2 (chain depth: 2)
scsi0-hd0: a.qcow (raw, read-only)
scsi0-hd1: enc.qcow2 (qcow2, encrypted)
ide1-cd0: [not inserted]
Removable device: not locked, tray closed
floppy0: [not inserted]
Removable device: not locked, tray closed
sd0: [not inserted]
Removable device: not locked, tray closed"""
info_qmp = [{"io-status": "ok", "device": "ide0-hd0", "locked":
False, "removable": False, "inserted": {"iops_rd": 0,
"iops_wr": 0, "ro": False, "backing_file_depth": 2,
"drv": "qcow2", "iops": 0, "bps_wr": 0, "backing_file":
"b.qcow2", "encrypted": False, "bps": 0, "bps_rd": 0,
"file": "c.qcow2", "encryption_key_missing": False},
"type": "unknown"}, {"io-status": "ok", "device":
"scsi0-hd0", "locked": False, "removable": False,
"inserted": {"iops_rd": 0, "iops_wr": 0, "ro": True,
"backing_file_depth": 0, "drv": "raw", "iops": 0,
"bps_wr": 0, "encrypted": False, "bps": 0, "bps_rd": 0,
"file": "a.qcow", "encryption_key_missing": False},
"type": "unknown"}, {"io-status": "ok", "device":
"scsi0-hd1", "locked": False, "removable": False,
"inserted": {"iops_rd": 0, "iops_wr": 0, "ro": False,
"backing_file_depth": 0, "drv": "qcow2", "iops": 0,
"bps_wr": 0, "encrypted": True, "bps": 0, "bps_rd": 0,
"file": "enc.qcow2", "encryption_key_missing": True},
"type": "unknown"}, {"io-status": "ok", "device":
"ide1-cd0", "locked": False, "removable": True,
"tray_open": False, "type": "unknown"}, {"device":
"floppy0", "locked": False, "removable": True,
"tray_open": False, "type": "unknown"}, {"device": "sd0",
"locked": False, "removable": True, "tray_open": False,
"type": "unknown"}]
monitor = MockMonitor()
# Test "info block" version 1.4
monitor.info = lambda _what, _debug: info_1_4
out1 = monitor.info_block()
exp = {'sd0': {'tray-open': 0, 'locked': 0, 'not-inserted': 1,
'removable': 1},
'ide0-hd0': {'bps_rd': 0, 'backing_file_depth': 2,
'removable': 0, 'encrypted': 0, 'bps_wr': 0,
'io-status': 'ok', 'drv': 'qcow2', 'bps': 0,
'iops': 0, 'file': 'c.qcow2', 'iops_rd': 0,
'ro': 0, 'backing_file': 'b.qcow2', 'iops_wr': 0},
'floppy0': {'tray-open': 0, 'locked': 0, 'not-inserted': 1,
'removable': 1},
'ide1-cd0': {'tray-open': 0, 'locked': 0, 'not-inserted': 1,
'io-status': 'ok', 'removable': 1},
'scsi0-hd0': {'bps_rd': 0, 'removable': 0, 'encrypted': 0,
'bps_wr': 0, 'io-status': 'ok', 'drv': 'raw',
'bps': 0, 'iops': 0, 'file': 'a.qcow',
'iops_rd': 0, 'ro': 1, 'iops_wr': 0},
'scsi0-hd1': {'bps_rd': 0, 'removable': 0, 'encrypted': 1,
'bps_wr': 0, 'io-status': 'ok', 'drv': 'qcow2',
'bps': 0, 'iops': 0, 'file': 'enc.qcow2',
'iops_rd': 0, 'ro': 0, 'iops_wr': 0}}
assert out1 == exp, ("Info block of qemu 1.4 is parsed incorrectly\n%s"
"\n%s" % (out1, exp))
# Test "info block" version 1.5
monitor.info = lambda _what, _debug: info_1_5
out2 = monitor.info_block()
exp = {'sd0': {'not-inserted': 1, 'removable': 1},
'ide0-hd0': {'backing_file_depth': 2, 'drv': 'qcow2',
'backing_file': 'b.qcow2', 'file': 'c.qcow2'},
'floppy0': {'not-inserted': 1, 'removable': 1},
'ide1-cd0': {'not-inserted': 1, 'removable': 1},
'scsi0-hd0': {'drv': 'raw', 'ro': 1, 'file': 'a.qcow'},
'scsi0-hd1': {'encrypted': 1, 'drv': 'qcow2',
'file': 'enc.qcow2'}}
assert out2 == exp, ("Info block of qemu 1.5 is parsed incorrectly\n%s"
"\n%s" % (out2, exp))
        # verify that both representations give the same results
# (qemu-1.5 is less informative so not all params are checked)
for name, params in out2.iteritems():
            assert name in out1, ("missing disk '%s' in info-1.4\n%s\n%s"
% (name, out2, out1))
for key, value in params.iteritems():
assert out1[name].get(key, 0) == value, ("value of disk %s "
"mismatch in info-1.5 %s=%s (%s)\n%s\n%s"
% (name, key, value, out1[
name].get(key, 0),
out2, out1))
# Test "query-block" qmp version
monitor.info = lambda _what, _debug: info_qmp
out3 = monitor.info_block()
exp = {'sd0': {'type': 'unknown', 'tray_open': False,
'not-inserted': True, 'removable': True,
'locked': False},
'ide0-hd0': {'bps_rd': 0, 'backing_file_depth': 2,
'removable': False, 'type': 'unknown',
'encrypted': False, 'bps_wr': 0, 'locked': False,
'drv': 'qcow2', 'bps': 0, 'iops': 0,
'io-status': 'ok', 'file': 'c.qcow2',
'iops_rd': 0, 'encryption_key_missing': False,
'ro': False, 'backing_file': 'b.qcow2',
'iops_wr': 0},
'floppy0': {'type': 'unknown', 'tray_open': False,
'not-inserted': True, 'removable': True,
'locked': False},
'ide1-cd0': {'locked': False, 'tray_open': False,
'io-status': 'ok', 'removable': True,
'not-inserted': True, 'type': 'unknown'},
'scsi0-hd0': {'bps_rd': 0, 'backing_file_depth': 0,
'removable': False, 'encrypted': False,
'bps_wr': 0, 'locked': False, 'drv': 'raw',
'bps': 0, 'iops': 0, 'io-status': 'ok',
'file': 'a.qcow', 'iops_rd': 0,
'encryption_key_missing': False, 'ro': True,
'type': 'unknown', 'iops_wr': 0},
'scsi0-hd1': {'bps_rd': 0, 'backing_file_depth': 0,
'removable': False, 'encrypted': True,
'bps_wr': 0, 'locked': False, 'drv': 'qcow2',
'bps': 0, 'iops': 0, 'io-status': 'ok',
'file': 'enc.qcow2', 'iops_rd': 0,
'encryption_key_missing': True, 'ro': False,
'type': 'unknown', 'iops_wr': 0}}
assert out3 == exp, ("QMP query-block of qemu is parsed incorrectly\n"
"%s\n%s" % (out3, exp))
        # verify that both representations give the same results
# (qemu-1.4 is less informative so not all params are checked)
for name, params in out1.iteritems():
            assert name in out3, ("missing disk '%s' in QMP output\n%s\n%s"
% (name, out1, out3))
for key, value in params.iteritems():
assert out3[name].get(key, 0) == value, ("value of disk %s "
"mismatch in QMP version %s=%s (%s)\n%s\n%s"
% (name, key, value, out3[
name].get(key, 0),
out1, out3))
if __name__ == "__main__":
unittest.main()
| gpl-2.0 |
perkinslr/pypyjs | website/js/pypy.js-0.2.0/lib/modules/profile.py | 166 | 22782 | #! /usr/bin/env python
#
# Class for profiling python code. rev 1.0 6/2/94
#
# Written by James Roskind
# Based on prior profile module by Sjoerd Mullender...
# which was hacked somewhat by: Guido van Rossum
"""Class for profiling Python code."""
# Copyright Disney Enterprises, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
import sys
import os
import time
import marshal
from optparse import OptionParser
__all__ = ["run", "runctx", "help", "Profile"]
# Sample timer for use with the profiler: an integer-returning timer.
#i_count = 0
#def integer_timer():
# global i_count
# i_count = i_count + 1
# return i_count
#itimes = integer_timer # replace with C coded timer returning integers
#**************************************************************************
# The following are the static member functions for the profiler class
# Note that an instance of Profile() is *not* needed to call them.
#**************************************************************************
def run(statement, filename=None, sort=-1):
"""Run statement under profiler optionally saving results in filename
This function takes a single argument that can be passed to the
"exec" statement, and an optional file name. In all cases this
routine attempts to "exec" its first argument and gather profiling
statistics from the execution. If no file name is present, then this
function automatically prints a simple profiling report, sorted by the
standard name string (file/line/function-name) that is presented in
each line.
"""
prof = Profile()
try:
prof = prof.run(statement)
except SystemExit:
pass
if filename is not None:
prof.dump_stats(filename)
else:
return prof.print_stats(sort)
def runctx(statement, globals, locals, filename=None, sort=-1):
"""Run statement under profiler, supplying your own globals and locals,
optionally saving results in filename.
statement and filename have the same semantics as profile.run
"""
prof = Profile()
try:
prof = prof.runctx(statement, globals, locals)
except SystemExit:
pass
if filename is not None:
prof.dump_stats(filename)
else:
return prof.print_stats(sort)
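# Illustrative usage of run()/runctx() (a hedged sketch; 'my_func' and
# 'profile.out' are hypothetical names, not part of this module):
#   import profile
#   profile.run('my_func()')                  # print a report to stdout
#   profile.run('my_func()', 'profile.out')   # dump stats for later use with pstats.Stats
#   profile.runctx('my_func(x)', globals(), {'x': 42})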
# Backwards compatibility.
def help():
print "Documentation for the profile module can be found "
print "in the Python Library Reference, section 'The Python Profiler'."
if hasattr(os, "times"):
def _get_time_times(timer=os.times):
t = timer()
return t[0] + t[1]
# Using getrusage(3) is better than clock(3) if available:
# on some systems (e.g. FreeBSD), getrusage has a higher resolution
# Furthermore, on a POSIX system, clock(3) returns microseconds, which
# wrap around after 36min.
_has_res = 0
try:
import resource
resgetrusage = lambda: resource.getrusage(resource.RUSAGE_SELF)
def _get_time_resource(timer=resgetrusage):
t = timer()
return t[0] + t[1]
_has_res = 1
except ImportError:
pass
class Profile:
"""Profiler class.
self.cur is always a tuple. Each such tuple corresponds to a stack
frame that is currently active (self.cur[-2]). The following are the
definitions of its members. We use this external "parallel stack" to
avoid contaminating the program that we are profiling. (old profiler
used to write into the frames local dictionary!!) Derived classes
can change the definition of some entries, as long as they leave
[-2:] intact (frame and previous tuple). In case an internal error is
detected, the -3 element is used as the function name.
[ 0] = Time that needs to be charged to the parent frame's function.
It is used so that a function call will not have to access the
timing data for the parent frame.
[ 1] = Total time spent in this frame's function, excluding time in
subfunctions (this latter is tallied in cur[2]).
[ 2] = Total time spent in subfunctions, excluding time executing the
frame's function (this latter is tallied in cur[1]).
[-3] = Name of the function that corresponds to this frame.
[-2] = Actual frame that we correspond to (used to sync exception handling).
[-1] = Our parent 6-tuple (corresponds to frame.f_back).
Timing data for each function is stored as a 5-tuple in the dictionary
self.timings[]. The index is always the name stored in self.cur[-3].
The following are the definitions of the members:
[0] = The number of times this function was called, not counting direct
or indirect recursion,
[1] = Number of times this function appears on the stack, minus one
[2] = Total time spent internal to this function
[3] = Cumulative time that this function was present on the stack. In
non-recursive functions, this is the total execution time from start
to finish of each invocation of a function, including time spent in
all subfunctions.
[4] = A dictionary indicating for each function name, the number of times
it was called by us.
"""
bias = 0 # calibration constant
def __init__(self, timer=None, bias=None):
self.timings = {}
self.cur = None
self.cmd = ""
self.c_func_name = ""
if bias is None:
bias = self.bias
self.bias = bias # Materialize in local dict for lookup speed.
if not timer:
if _has_res:
self.timer = resgetrusage
self.dispatcher = self.trace_dispatch
self.get_time = _get_time_resource
elif hasattr(time, 'clock'):
self.timer = self.get_time = time.clock
self.dispatcher = self.trace_dispatch_i
elif hasattr(os, 'times'):
self.timer = os.times
self.dispatcher = self.trace_dispatch
self.get_time = _get_time_times
else:
self.timer = self.get_time = time.time
self.dispatcher = self.trace_dispatch_i
else:
self.timer = timer
t = self.timer() # test out timer function
try:
length = len(t)
except TypeError:
self.get_time = timer
self.dispatcher = self.trace_dispatch_i
else:
if length == 2:
self.dispatcher = self.trace_dispatch
else:
self.dispatcher = self.trace_dispatch_l
# This get_time() implementation needs to be defined
# here to capture the passed-in timer in the parameter
# list (for performance). Note that we can't assume
# the timer() result contains two values in all
# cases.
def get_time_timer(timer=timer, sum=sum):
return sum(timer())
self.get_time = get_time_timer
self.t = self.get_time()
self.simulate_call('profiler')
# Heavily optimized dispatch routine for os.times() timer
def trace_dispatch(self, frame, event, arg):
timer = self.timer
t = timer()
t = t[0] + t[1] - self.t - self.bias
if event == "c_call":
self.c_func_name = arg.__name__
if self.dispatch[event](self, frame,t):
t = timer()
self.t = t[0] + t[1]
else:
r = timer()
self.t = r[0] + r[1] - t # put back unrecorded delta
# Dispatch routine for best timer program (return = scalar, fastest if
# an integer but float works too -- and time.clock() relies on that).
def trace_dispatch_i(self, frame, event, arg):
timer = self.timer
t = timer() - self.t - self.bias
if event == "c_call":
self.c_func_name = arg.__name__
if self.dispatch[event](self, frame, t):
self.t = timer()
else:
self.t = timer() - t # put back unrecorded delta
# Dispatch routine for macintosh (timer returns time in ticks of
# 1/60th second)
def trace_dispatch_mac(self, frame, event, arg):
timer = self.timer
t = timer()/60.0 - self.t - self.bias
if event == "c_call":
self.c_func_name = arg.__name__
if self.dispatch[event](self, frame, t):
self.t = timer()/60.0
else:
self.t = timer()/60.0 - t # put back unrecorded delta
# SLOW generic dispatch routine for timer returning lists of numbers
def trace_dispatch_l(self, frame, event, arg):
get_time = self.get_time
t = get_time() - self.t - self.bias
if event == "c_call":
self.c_func_name = arg.__name__
if self.dispatch[event](self, frame, t):
self.t = get_time()
else:
self.t = get_time() - t # put back unrecorded delta
# In the event handlers, the first 3 elements of self.cur are unpacked
# into vrbls w/ 3-letter names. The last two characters are meant to be
# mnemonic:
# _pt self.cur[0] "parent time" time to be charged to parent frame
# _it self.cur[1] "internal time" time spent directly in the function
# _et self.cur[2] "external time" time spent in subfunctions
def trace_dispatch_exception(self, frame, t):
rpt, rit, ret, rfn, rframe, rcur = self.cur
if (rframe is not frame) and rcur:
return self.trace_dispatch_return(rframe, t)
self.cur = rpt, rit+t, ret, rfn, rframe, rcur
return 1
def trace_dispatch_call(self, frame, t):
if self.cur and frame.f_back is not self.cur[-2]:
rpt, rit, ret, rfn, rframe, rcur = self.cur
if not isinstance(rframe, Profile.fake_frame):
assert rframe.f_back is frame.f_back, ("Bad call", rfn,
rframe, rframe.f_back,
frame, frame.f_back)
self.trace_dispatch_return(rframe, 0)
assert (self.cur is None or \
frame.f_back is self.cur[-2]), ("Bad call",
self.cur[-3])
fcode = frame.f_code
fn = (fcode.co_filename, fcode.co_firstlineno, fcode.co_name)
self.cur = (t, 0, 0, fn, frame, self.cur)
timings = self.timings
if fn in timings:
cc, ns, tt, ct, callers = timings[fn]
timings[fn] = cc, ns + 1, tt, ct, callers
else:
timings[fn] = 0, 0, 0, 0, {}
return 1
def trace_dispatch_c_call (self, frame, t):
fn = ("", 0, self.c_func_name)
self.cur = (t, 0, 0, fn, frame, self.cur)
timings = self.timings
if fn in timings:
cc, ns, tt, ct, callers = timings[fn]
timings[fn] = cc, ns+1, tt, ct, callers
else:
timings[fn] = 0, 0, 0, 0, {}
return 1
def trace_dispatch_return(self, frame, t):
if frame is not self.cur[-2]:
assert frame is self.cur[-2].f_back, ("Bad return", self.cur[-3])
self.trace_dispatch_return(self.cur[-2], 0)
# Prefix "r" means part of the Returning or exiting frame.
# Prefix "p" means part of the Previous or Parent or older frame.
rpt, rit, ret, rfn, frame, rcur = self.cur
rit = rit + t
frame_total = rit + ret
ppt, pit, pet, pfn, pframe, pcur = rcur
self.cur = ppt, pit + rpt, pet + frame_total, pfn, pframe, pcur
timings = self.timings
cc, ns, tt, ct, callers = timings[rfn]
if not ns:
# This is the only occurrence of the function on the stack.
# Else this is a (directly or indirectly) recursive call, and
# its cumulative time will get updated when the topmost call to
# it returns.
ct = ct + frame_total
cc = cc + 1
if pfn in callers:
callers[pfn] = callers[pfn] + 1 # hack: gather more
# stats such as the amount of time added to ct courtesy
# of this specific call, and the contribution to cc
# courtesy of this call.
else:
callers[pfn] = 1
timings[rfn] = cc, ns - 1, tt + rit, ct, callers
return 1
dispatch = {
"call": trace_dispatch_call,
"exception": trace_dispatch_exception,
"return": trace_dispatch_return,
"c_call": trace_dispatch_c_call,
"c_exception": trace_dispatch_return, # the C function returned
"c_return": trace_dispatch_return,
}
# The next few functions play with self.cmd. By carefully preloading
# our parallel stack, we can force the profiled result to include
# an arbitrary string as the name of the calling function.
# We use self.cmd as that string, and the resulting stats look
# very nice :-).
def set_cmd(self, cmd):
if self.cur[-1]: return # already set
self.cmd = cmd
self.simulate_call(cmd)
class fake_code:
def __init__(self, filename, line, name):
self.co_filename = filename
self.co_line = line
self.co_name = name
self.co_firstlineno = 0
def __repr__(self):
return repr((self.co_filename, self.co_line, self.co_name))
class fake_frame:
def __init__(self, code, prior):
self.f_code = code
self.f_back = prior
def simulate_call(self, name):
code = self.fake_code('profile', 0, name)
if self.cur:
pframe = self.cur[-2]
else:
pframe = None
frame = self.fake_frame(code, pframe)
self.dispatch['call'](self, frame, 0)
# collect stats from pending stack, including getting final
# timings for self.cmd frame.
def simulate_cmd_complete(self):
get_time = self.get_time
t = get_time() - self.t
while self.cur[-1]:
# We *can* cause assertion errors here if
# dispatch_trace_return checks for a frame match!
self.dispatch['return'](self, self.cur[-2], t)
t = 0
self.t = get_time() - t
def print_stats(self, sort=-1):
import pstats
pstats.Stats(self).strip_dirs().sort_stats(sort). \
print_stats()
def dump_stats(self, file):
f = open(file, 'wb')
self.create_stats()
marshal.dump(self.stats, f)
f.close()
def create_stats(self):
self.simulate_cmd_complete()
self.snapshot_stats()
def snapshot_stats(self):
self.stats = {}
for func, (cc, ns, tt, ct, callers) in self.timings.iteritems():
callers = callers.copy()
nc = 0
for callcnt in callers.itervalues():
nc += callcnt
self.stats[func] = cc, nc, tt, ct, callers
# The following two methods can be called by clients to use
# a profiler to profile a statement, given as a string.
def run(self, cmd):
import __main__
dict = __main__.__dict__
return self.runctx(cmd, dict, dict)
def runctx(self, cmd, globals, locals):
self.set_cmd(cmd)
sys.setprofile(self.dispatcher)
try:
exec cmd in globals, locals
finally:
sys.setprofile(None)
return self
# This method is more useful to profile a single function call.
def runcall(self, func, *args, **kw):
self.set_cmd(repr(func))
sys.setprofile(self.dispatcher)
try:
return func(*args, **kw)
finally:
sys.setprofile(None)
#******************************************************************
# The following calculates the overhead for using a profiler. The
# problem is that it takes a fair amount of time for the profiler
# to stop the stopwatch (from the time it receives an event).
# Similarly, there is a delay from the time that the profiler
# re-starts the stopwatch before the user's code really gets to
# continue. The following code tries to measure the difference on
# a per-event basis.
#
# Note that this difference is only significant if there are a lot of
# events, and relatively little user code per event. For example,
# code with small functions will typically benefit from having the
# profiler calibrated for the current platform. This *could* be
# done on the fly during init() time, but it is not worth the
# effort. Also note that if too large a value specified, then
# execution time on some functions will actually appear as a
# negative number. It is *normal* for some functions (with very
# low call counts) to have such negative stats, even if the
# calibration figure is "correct."
#
# One alternative to profile-time calibration adjustments (i.e.,
# adding in the magic little delta during each event) is to track
# more carefully the number of events (and cumulatively, the number
# of events during sub functions) that are seen. If this were
# done, then the arithmetic could be done after the fact (i.e., at
# display time). Currently, we track only call/return events.
# These values can be deduced by examining the callees and callers
# vectors for each functions. Hence we *can* almost correct the
# internal time figure at print time (note that we currently don't
# track exception event processing counts). Unfortunately, there
# is currently no similar information for cumulative sub-function
# time. It would not be hard to "get all this info" at profiler
# time. Specifically, we would have to extend the tuples to keep
# counts of this in each frame, and then extend the defs of timing
# tuples to include the significant two figures. I'm a bit fearful
# that this additional feature will slow the heavily optimized
    # event/time ratio (i.e., the profiler would run slower, for a very
# low "value added" feature.)
#**************************************************************
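    # Illustrative calibration workflow (a hedged sketch; the measured value
    # varies per machine and timer, so repeat it a few times and compare):
    #   pr = Profile()
    #   bias = pr.calibrate(10000)
    #   Profile.bias = bias    # applied to all Profile instances created afterwards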
def calibrate(self, m, verbose=0):
if self.__class__ is not Profile:
raise TypeError("Subclasses must override .calibrate().")
saved_bias = self.bias
self.bias = 0
try:
return self._calibrate_inner(m, verbose)
finally:
self.bias = saved_bias
def _calibrate_inner(self, m, verbose):
get_time = self.get_time
# Set up a test case to be run with and without profiling. Include
# lots of calls, because we're trying to quantify stopwatch overhead.
# Do not raise any exceptions, though, because we want to know
# exactly how many profile events are generated (one call event, +
# one return event, per Python-level call).
def f1(n):
for i in range(n):
x = 1
def f(m, f1=f1):
for i in range(m):
f1(100)
f(m) # warm up the cache
# elapsed_noprofile <- time f(m) takes without profiling.
t0 = get_time()
f(m)
t1 = get_time()
elapsed_noprofile = t1 - t0
if verbose:
print "elapsed time without profiling =", elapsed_noprofile
# elapsed_profile <- time f(m) takes with profiling. The difference
# is profiling overhead, only some of which the profiler subtracts
# out on its own.
p = Profile()
t0 = get_time()
p.runctx('f(m)', globals(), locals())
t1 = get_time()
elapsed_profile = t1 - t0
if verbose:
print "elapsed time with profiling =", elapsed_profile
# reported_time <- "CPU seconds" the profiler charged to f and f1.
total_calls = 0.0
reported_time = 0.0
for (filename, line, funcname), (cc, ns, tt, ct, callers) in \
p.timings.items():
if funcname in ("f", "f1"):
total_calls += cc
reported_time += tt
if verbose:
print "'CPU seconds' profiler reported =", reported_time
print "total # calls =", total_calls
if total_calls != m + 1:
raise ValueError("internal error: total calls = %d" % total_calls)
# reported_time - elapsed_noprofile = overhead the profiler wasn't
# able to measure. Divide by twice the number of calls (since there
# are two profiler events per call in this test) to get the hidden
# overhead per event.
mean = (reported_time - elapsed_noprofile) / 2.0 / total_calls
if verbose:
print "mean stopwatch overhead per profile event =", mean
return mean
#****************************************************************************
def Stats(*args):
print 'Report generating functions are in the "pstats" module\a'
def main():
usage = "profile.py [-o output_file_path] [-s sort] scriptfile [arg] ..."
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = False
parser.add_option('-o', '--outfile', dest="outfile",
help="Save stats to <outfile>", default=None)
parser.add_option('-s', '--sort', dest="sort",
help="Sort order when printing to stdout, based on pstats.Stats class",
default=-1)
if not sys.argv[1:]:
parser.print_usage()
sys.exit(2)
(options, args) = parser.parse_args()
sys.argv[:] = args
if len(args) > 0:
progname = args[0]
sys.path.insert(0, os.path.dirname(progname))
with open(progname, 'rb') as fp:
code = compile(fp.read(), progname, 'exec')
globs = {
'__file__': progname,
'__name__': '__main__',
'__package__': None,
}
runctx(code, globs, None, options.outfile, options.sort)
else:
parser.print_usage()
return parser
# When invoked as main program, invoke the profiler on a script
if __name__ == '__main__':
main()
| mit |
Arcensoth/cogbot | cogbot/cogs/join_leave/join_leave_server_state.py | 1 | 2346 | from discord import Member, Role
from discord.ext.commands import Context
from cogbot.cogs.abc.base_cog import BaseCogServerState
from cogbot.cogs.join_leave.join_leave_options import JoinLeaveOptions
class JoinLeaveServerState(BaseCogServerState[JoinLeaveOptions]):
async def create_options(self) -> JoinLeaveOptions:
return await JoinLeaveOptions().init(self, self.raw_options)
async def join_role(self, ctx: Context, author: Member, role_alias: str):
try:
role_entry = self.options.role_entry_from_alias[role_alias.lower()]
role = self.bot.get_role(self.server, role_entry.role_id)
await self.bot.add_roles(author, role)
await self.bot.say(f"{author.mention} has joined {role}")
except:
self.log.info(f"{author} failed to join the role: {role_alias}")
await self.bot.react_question(ctx)
async def leave_role(self, ctx: Context, author: Member, role_alias: str):
try:
role_entry = self.options.role_entry_from_alias[role_alias]
role = self.bot.get_role(self.server, role_entry.role_id)
await self.bot.remove_roles(author, role)
await self.bot.say(f"{author.mention} has left {role}")
except:
self.log.info(f"{author} failed to leave the role: {role_alias}")
await self.bot.react_question(ctx)
async def list_roles(self, ctx: Context, author: Member):
role_lines = []
for role_entry in self.options.role_entries:
role: Role = self.bot.get_role(self.server, role_entry.role_id)
role_lines.append(f"{role}")
role_aliases = role_entry.aliases
first_role_alias = role_aliases[0]
other_role_aliases = role_aliases[1:]
role_aliases_line = f" >join {first_role_alias}"
if other_role_aliases:
other_role_aliases_str = " or ".join(
f'"{role_alias}"' for role_alias in other_role_aliases
)
role_aliases_line = f"{role_aliases_line} (or {other_role_aliases_str})"
role_lines.append(role_aliases_line)
roles_str = "\n".join(role_lines)
await self.bot.say(
f"{author.mention} Available self-assignable roles:\n```\n{roles_str}\n```"
)
| mit |
MSOpenTech/edx-platform | openedx/core/djangoapps/course_groups/management/commands/tests/test_remove_users_from_multiple_cohorts.py | 91 | 3951 | """
Tests for cleanup of users which are added in multiple cohorts of a course
"""
from django.core.exceptions import MultipleObjectsReturned
from django.core.management import call_command
from django.test.client import RequestFactory
from openedx.core.djangoapps.course_groups.views import cohort_handler
from openedx.core.djangoapps.course_groups.cohorts import get_cohort, get_cohort_by_name
from openedx.core.djangoapps.course_groups.tests.helpers import config_course_cohorts
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class TestMultipleCohortUsers(ModuleStoreTestCase):
"""
Base class for testing users with multiple cohorts
"""
def setUp(self):
"""
setup course, user and request for tests
"""
super(TestMultipleCohortUsers, self).setUp()
self.course1 = CourseFactory.create()
self.course2 = CourseFactory.create()
self.user1 = UserFactory(is_staff=True)
self.user2 = UserFactory(is_staff=True)
self.request = RequestFactory().get("dummy_url")
self.request.user = self.user1
def test_users_with_multiple_cohorts_cleanup(self):
"""
Test that user which have been added in multiple cohorts of a course,
can get cohorts without error after running cohorts cleanup command
"""
# set two auto_cohort_groups for both courses
config_course_cohorts(
self.course1, is_cohorted=True, auto_cohorts=["Course1AutoGroup1", "Course1AutoGroup2"]
)
config_course_cohorts(
self.course2, is_cohorted=True, auto_cohorts=["Course2AutoGroup1", "Course2AutoGroup2"]
)
# get the cohorts from the courses, which will cause auto cohorts to be created
cohort_handler(self.request, unicode(self.course1.id))
cohort_handler(self.request, unicode(self.course2.id))
course_1_auto_cohort_1 = get_cohort_by_name(self.course1.id, "Course1AutoGroup1")
course_1_auto_cohort_2 = get_cohort_by_name(self.course1.id, "Course1AutoGroup2")
course_2_auto_cohort_1 = get_cohort_by_name(self.course2.id, "Course2AutoGroup1")
# forcefully add user1 in two auto cohorts
course_1_auto_cohort_1.users.add(self.user1)
course_1_auto_cohort_2.users.add(self.user1)
# forcefully add user2 in auto cohorts of both courses
course_1_auto_cohort_1.users.add(self.user2)
course_2_auto_cohort_1.users.add(self.user2)
# now check that when user1 goes on discussion page and tries to get
# cohorts 'MultipleObjectsReturned' exception is returned
with self.assertRaises(MultipleObjectsReturned):
get_cohort(self.user1, self.course1.id)
# also check that user 2 can go on discussion page of both courses
# without any exception
get_cohort(self.user2, self.course1.id)
get_cohort(self.user2, self.course2.id)
        # call the management command so that users added in multiple cohorts
        # of a course are removed from all cohort groups
call_command('remove_users_from_multiple_cohorts')
# check that only user1 (with multiple cohorts) is removed from cohorts
# and user2 is still in auto cohorts of both course after running
# 'remove_users_from_multiple_cohorts' management command
self.assertEqual(self.user1.course_groups.count(), 0)
self.assertEqual(self.user2.course_groups.count(), 2)
user2_cohorts = list(self.user2.course_groups.values_list('name', flat=True))
self.assertEqual(user2_cohorts, ['Course1AutoGroup1', 'Course2AutoGroup1'])
# now check that user1 can get cohorts in which he is added
response = cohort_handler(self.request, unicode(self.course1.id))
self.assertEqual(response.status_code, 200)
| agpl-3.0 |
Rahulsharma0810/Scrapy-Python-TheHinduDailyNews | TheHinduDailyNews/settings.py | 1 | 3227 | # -*- coding: utf-8 -*-
# Scrapy settings for TheHinduDailyNews project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'TheHinduDailyNews'
SPIDER_MODULES = ['TheHinduDailyNews.spiders']
NEWSPIDER_MODULE = 'TheHinduDailyNews.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
# USER_AGENT = 'TheHinduDailyNews (+http://www.yourdomain.com)'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True
# Configure maximum concurrent requests performed by Scrapy (default: 16)
# CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'TheHinduDailyNews.middlewares.ThehindudailynewsSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'TheHinduDailyNews.middlewares.MyCustomDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
# 'TheHinduDailyNews.pipelines.SomePipeline': 300,
#}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
| mit |
glennrub/micropython | tests/extmod/vfs_lfs.py | 10 | 3777 | # Test for VfsLittle using a RAM device
try:
import uos
uos.VfsLfs1
uos.VfsLfs2
except (ImportError, AttributeError):
print("SKIP")
raise SystemExit
class RAMBlockDevice:
ERASE_BLOCK_SIZE = 1024
def __init__(self, blocks):
self.data = bytearray(blocks * self.ERASE_BLOCK_SIZE)
def readblocks(self, block, buf, off):
addr = block * self.ERASE_BLOCK_SIZE + off
for i in range(len(buf)):
buf[i] = self.data[addr + i]
def writeblocks(self, block, buf, off):
addr = block * self.ERASE_BLOCK_SIZE + off
for i in range(len(buf)):
self.data[addr + i] = buf[i]
def ioctl(self, op, arg):
if op == 4: # block count
return len(self.data) // self.ERASE_BLOCK_SIZE
if op == 5: # block size
return self.ERASE_BLOCK_SIZE
if op == 6: # erase block
return 0
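# Illustrative standalone use of the block device above (a hedged sketch; the
# test() helper below exercises it the same way, and mounting is optional):
#   bdev = RAMBlockDevice(30)
#   uos.VfsLfs2.mkfs(bdev)
#   vfs = uos.VfsLfs2(bdev)
#   uos.mount(vfs, '/ramdisk')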
def print_stat(st, print_size=True):
# don't print times (just check that they have the correct type)
print(st[:6], st[6] if print_size else -1, type(st[7]), type(st[8]), type(st[9]))
def test(bdev, vfs_class):
print("test", vfs_class)
# mkfs
vfs_class.mkfs(bdev)
# construction
vfs = vfs_class(bdev)
# statvfs
print(vfs.statvfs("/"))
# open, write close
f = vfs.open("test", "w")
f.write("littlefs")
f.close()
# statvfs after creating a file
print(vfs.statvfs("/"))
# ilistdir
print(list(vfs.ilistdir()))
print(list(vfs.ilistdir("/")))
print(list(vfs.ilistdir(b"/")))
# mkdir, rmdir
vfs.mkdir("testdir")
print(list(vfs.ilistdir()))
print(sorted(list(vfs.ilistdir("testdir"))))
vfs.rmdir("testdir")
print(list(vfs.ilistdir()))
vfs.mkdir("testdir")
# stat a file
print_stat(vfs.stat("test"))
# stat a dir (size seems to vary on LFS2 so don't print that)
print_stat(vfs.stat("testdir"), False)
# read
with vfs.open("test", "r") as f:
print(f.read())
# create large file
with vfs.open("testbig", "w") as f:
data = "large012" * 32 * 16
print("data length:", len(data))
for i in range(4):
print("write", i)
f.write(data)
# stat after creating large file
print(vfs.statvfs("/"))
# rename
vfs.rename("testbig", "testbig2")
print(sorted(list(vfs.ilistdir())))
vfs.chdir("testdir")
vfs.rename("/testbig2", "testbig2")
print(sorted(list(vfs.ilistdir())))
vfs.rename("testbig2", "/testbig2")
vfs.chdir("/")
print(sorted(list(vfs.ilistdir())))
# remove
vfs.remove("testbig2")
print(sorted(list(vfs.ilistdir())))
# getcwd, chdir
vfs.mkdir("/testdir2")
vfs.mkdir("/testdir/subdir")
print(vfs.getcwd())
vfs.chdir("/testdir")
print(vfs.getcwd())
# create file in directory to make sure paths are relative
vfs.open("test2", "w").close()
print_stat(vfs.stat("test2"))
print_stat(vfs.stat("/testdir/test2"))
vfs.remove("test2")
# chdir back to root and remove testdir
vfs.chdir("/")
print(vfs.getcwd())
vfs.chdir("testdir")
print(vfs.getcwd())
vfs.chdir("..")
print(vfs.getcwd())
vfs.chdir("testdir/subdir")
print(vfs.getcwd())
vfs.chdir("../..")
print(vfs.getcwd())
vfs.chdir("/./testdir2")
print(vfs.getcwd())
vfs.chdir("../testdir")
print(vfs.getcwd())
vfs.chdir("../..")
print(vfs.getcwd())
vfs.chdir(".//testdir")
print(vfs.getcwd())
vfs.chdir("subdir/./")
print(vfs.getcwd())
vfs.chdir("/")
print(vfs.getcwd())
vfs.rmdir("testdir/subdir")
vfs.rmdir("testdir")
vfs.rmdir("testdir2")
bdev = RAMBlockDevice(30)
test(bdev, uos.VfsLfs1)
test(bdev, uos.VfsLfs2)
| mit |
Protoneer/Arduino | arduino-core/src/processing/app/i18n/python/requests/packages/charade/big5prober.py | 2931 | 1684 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import Big5DistributionAnalysis
from .mbcssm import Big5SMModel
class Big5Prober(MultiByteCharSetProber):
def __init__(self):
MultiByteCharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(Big5SMModel)
self._mDistributionAnalyzer = Big5DistributionAnalysis()
self.reset()
def get_charset_name(self):
return "Big5"
| lgpl-2.1 |
mypinballs/whirlwind | effects.py | 1 | 8263 | # Top Rollover Lanes
__author__="jim"
__date__ ="$Jan 18, 2011 1:36:37 PM$"
import procgame
import locale
from procgame import *
base_path = config.value_for_key_path('base_path')
game_path = base_path+"games/whirlwind/"
class Effects(game.Mode):
def __init__(self, game, priority):
super(Effects, self).__init__(game, priority)
def drive_lamp(self, lamp_name, style='on',time=2):
if style == 'slow':
self.game.lamps[lamp_name].schedule(schedule=0x00ff00ff, cycle_seconds=0, now=True)
elif style == 'medium':
self.game.lamps[lamp_name].schedule(schedule=0x0f0f0f0f, cycle_seconds=0, now=True)
elif style == 'fast':
self.game.lamps[lamp_name].schedule(schedule=0x99999999, cycle_seconds=0, now=True)
elif style == 'superfast':
self.game.lamps[lamp_name].schedule(schedule=0xaaaaaaaa, cycle_seconds=0, now=True)
elif style == 'on':
self.game.lamps[lamp_name].enable()
elif style == 'off':
self.off(lamp_name)
elif style == 'smarton':
self.game.lamps[lamp_name].schedule(schedule=0xaaaaaaaa, cycle_seconds=0, now=True)
self.cancel_delayed(lamp_name+'_on')
self.delay(name=lamp_name+'_on', event_type=None, delay=0.6, handler=self.game.lamps[lamp_name].enable)
elif style == 'timedon':
self.game.lamps[lamp_name].enable()
self.cancel_delayed(lamp_name+'_off')
self.delay(name=lamp_name+'_off', event_type=None, delay=time, handler=self.off,param=lamp_name)
elif style == 'timeout':
if time>10:
self.cancel_delayed(lamp_name+'_medium')
self.delay(name=lamp_name+'_medium', event_type=None, delay=time-10, handler=lambda:self.drive_lamp(lamp_name,'medium'))
if time>5:
self.cancel_delayed(lamp_name+'_fast')
self.delay(name=lamp_name+'_fast', event_type=None, delay=time-5, handler=lambda:self.drive_lamp(lamp_name,'fast'))
if time>1:
self.cancel_delayed(lamp_name+'_superfast')
self.delay(name=lamp_name+'_superfast', event_type=None, delay=time-1, handler=lambda:self.drive_lamp(lamp_name,'superfast'))
self.delay(name=lamp_name+'_off', event_type=None, delay=time, handler=self.off,param=lamp_name)
def clear_lamp_timers(self,lamp_name):
self.cancel_delayed(lamp_name+'_medium')
self.cancel_delayed(lamp_name+'_fast')
self.cancel_delayed(lamp_name+'_superfast')
self.cancel_delayed(lamp_name+'on')
self.cancel_delayed(lamp_name+'_off')
def off(self,lamp_name):
self.clear_lamp_timers(lamp_name)
self.game.lamps[lamp_name].disable()
# def drive_super_fast(self, lamp_name):
# self.game.lamps[lamp_name].schedule(schedule=0x99999999, cycle_seconds=0, now=True)
#
# def drive_fast(self, lamp_name):
# self.game.lamps[lamp_name].schedule(schedule=0x55555555, cycle_seconds=0, now=True)
#
# def drive_medium(self, lamp_name):
# self.game.lamps[lamp_name].schedule(schedule=0x0f0f0f0f, cycle_seconds=0, now=True)
def drive_flasher(self, data, style='medium',cycle=0,time=2):
if isinstance(data, basestring):
flasher_name=data
else:
flasher_name=data[0]
style = data[1]
time = data[2]
if style == 'slow':
self.game.coils[flasher_name].schedule(schedule=0x00003000, cycle_seconds=cycle, now=True)
elif style == 'medium':
self.game.coils[flasher_name].schedule(schedule=0x30003000, cycle_seconds=cycle, now=True)
elif style == 'fast':
self.game.coils[flasher_name].schedule(schedule=0x11111111, cycle_seconds=cycle, now=True)
elif style == 'super':
self.game.coils[flasher_name].schedule(schedule=0x55555555, cycle_seconds=cycle, now=True)
elif style == 'super2':
self.game.coils[flasher_name].schedule(schedule=0x55055055, cycle_seconds=cycle, now=True)
elif style == 'strobe':
self.game.coils[flasher_name].schedule(schedule=0xeeeeeeee, cycle_seconds=cycle, now=True)
elif style == 'chaos':
self.game.coils[flasher_name].schedule(schedule=0x019930AB, cycle_seconds=cycle, now=True)
elif style == 'fade':
self.game.coils[flasher_name].schedule(schedule=0xAAA99933, cycle_seconds=cycle, now=True)
if time>0:
self.delay(name=flasher_name+'_off', event_type=None, delay=time, handler=self.game.coils[flasher_name].disable)
# def strobe_flasher_set(self,flasher_list,time=0.5):
# timer = 0
# for fname in flasher_list:
# self.delay(name=fname+'strobe', event_type=None, delay=timer, handler=self.drive_flasher, param=[fname,'fast',time])
# timer+=time
def strobe_flasher_set(self,flasher_list,time=1,overlap=0.2,repeats=1,enable=True):
timer = 0
for i in range(repeats):
for fname in flasher_list:
if enable:
self.delay(name=fname+'strobe', event_type=None, delay=timer, handler=self.drive_flasher, param=[fname,'fast',time+overlap])
timer+=time
else:
self.cancel_delayed(fname+'strobe')
self.game.coils[fname].disable()
def strobe_controlled_flasher_set(self,flasher_list,time=0.1,overlap=0.2,repeats=1,enable=True):
timer = 0
#playfield flashers
sequence=[]
for j in range(repeats):
sequence += flasher_list
for i in range(len(sequence)):
def flash(i,time,delay):
self.delay(delay=delay,handler=lambda:self.game.switched_coils.drive(name=sequence[i],style='fast',time=time+0.1))
flash(i,time,timer)
timer+=time
def drive_led(self,lamp_name,colour):
if colour=='red':
self.led_colour_data(lamp_name,'on','off','off')
elif colour=='pink':
self.led_colour_data(lamp_name,'on','off','med')
elif colour=='magenta':
self.led_colour_data(lamp_name,'on','off','on')
elif colour=='purple':
self.led_colour_data(lamp_name,'med','off','on')
elif colour=='skyblue':
self.led_colour_data(lamp_name,'off','med','on')
elif colour=='blue':
self.led_colour_data(lamp_name,'off','off','on')
elif colour=='cyan':
self.led_colour_data(lamp_name,'off','on','on')
elif colour=='turquoise':
self.led_colour_data(lamp_name,'off','on','med')
elif colour=='green':
self.led_colour_data(lamp_name,'off','on','off')
elif colour=='limegreen':
self.led_colour_data(lamp_name,'med','on','off')
elif colour=='yellow':
self.led_colour_data(lamp_name,'on','on','off')
elif colour=='orange':
self.led_colour_data(lamp_name,'on','med','off')
elif colour=='white':
self.led_colour_data(lamp_name,'on','on','on')
elif colour=='black':
self.led_colour_data(lamp_name,'off','off','off')
def led_colour_data(self,lamp_name,red,blue,green):
data=[red,green,blue]
name=['Red','Green','Blue']
for i in range(len(data)):
if data[i]=='off':
self.game.lamps[lamp_name+name[i]].disable()
elif data[i]=='on':
self.game.lamps[lamp_name+name[i]].enable()
elif data[i]=='med':
self.game.lamps[lamp_name+name[i]].schedule(schedule=0x80808080, cycle_seconds=0, now=True)
# self.game.lamps[lamp_name+name[i]].patter()
| gpl-3.0 |
aayush2911/Fibonaccio | web2py/gluon/contrib/memcache/__init__.py | 40 | 3734 | from gluon.contrib.memcache.memcache import Client
from gluon.cache import CacheAbstract
import time
"""
example of usage:
cache.memcache = MemcacheClient(request, ['127.0.0.1:11211'], debug=True)
"""
import cPickle as pickle
import thread
from gluon import current
DEFAULT_TIME_EXPIRE = 300 # seconds (must be the same as cache.ram)
def MemcacheClient(*a, **b):
if not hasattr(current,'__memcache_client'):
current.__memcache_client = MemcacheClientObj(*a, **b)
return current.__memcache_client
class MemcacheClientObj(Client):
meta_storage = {}
max_time_expire = 24*3600
def __init__(self, request, servers, debug=0, pickleProtocol=0,
pickler=pickle.Pickler, unpickler=pickle.Unpickler,
pload=None, pid=None,
default_time_expire = DEFAULT_TIME_EXPIRE):
self.request=request
self.default_time_expire = default_time_expire
if request:
app = request.application
else:
app = ''
Client.__init__(self, servers, debug, pickleProtocol,
pickler, unpickler, pload, pid)
if not app in self.meta_storage:
self.storage = self.meta_storage[app] = {
CacheAbstract.cache_stats_name: {
'hit_total': 0,
'misses': 0,
}}
else:
self.storage = self.meta_storage[app]
def __call__(self, key, f, time_expire = 'default'):
if time_expire == 'default':
time_expire = self.default_time_expire
        if time_expire is None:
time_expire = self.max_time_expire
# this must be commented because get and set are redefined
# key = self.__keyFormat__(key)
now = time.time()
value = None
if f is None: # force deletion of value
self.delete(key)
return None
elif time_expire==0: # value forced expired
item = None # value to be computed
else:
item = self.get(key)
if item:
if not isinstance(item,(list,tuple)):
value = item
elif (item[0] < now - time_expire): # value expired
item = None # value to be computed
else:
value = item[1]
if not item:
value = f()
self.set(key, (now,value), self.max_time_expire)
return value
def increment(self, key, value=1, time_expire='default'):
""" time_expire is ignored """
if time_expire == 'default':
time_expire = self.default_time_expire
newKey = self.__keyFormat__(key)
obj = Client.get(self, newKey)
if obj:
if isinstance(obj,(int,float,long)):
return Client.incr(self, newKey, value)
else:
value += obj[1]
Client.set(self,newKey,(time.time(),value),
self.max_time_expire)
return value
else:
Client.set(self, newKey, value, self.max_time_expire)
return value
def set(self, key, value, time_expire='default'):
if time_expire == 'default':
time_expire = self.default_time_expire
newKey = self.__keyFormat__(key)
return Client.set(self, newKey, value, time_expire)
def get(self, key):
newKey = self.__keyFormat__(key)
return Client.get(self, newKey)
def delete(self, key):
newKey = self.__keyFormat__(key)
return Client.delete(self, newKey)
def __keyFormat__(self, key):
return '%s/%s' % (self.request.application, key.replace(' ', '_'))
| gpl-2.0 |
TheTypoMaster/my-vim-set-mac | .vim/bundle/YouCompleteMe/third_party/ycmd/third_party/bottle/test/test_importhook.py | 50 | 1358 | # -*- coding: utf-8 -*-
import unittest
import sys, os
import imp
class TestImportHooks(unittest.TestCase):
def make_module(self, name, **args):
mod = sys.modules.setdefault(name, imp.new_module(name))
mod.__file__ = '<virtual %s>' % name
mod.__dict__.update(**args)
return mod
def test_direkt_import(self):
mod = self.make_module('bottle_test')
import bottle.ext.test
self.assertEqual(bottle.ext.test, mod)
def test_from_import(self):
mod = self.make_module('bottle_test')
from bottle.ext import test
self.assertEqual(test, mod)
def test_data_import(self):
mod = self.make_module('bottle_test', item='value')
from bottle.ext.test import item
self.assertEqual(item, 'value')
def test_import_fail(self):
        ''' Importing a missing bottle extension module raises ImportError. '''
def test():
import bottle.ext.doesnotexist
self.assertRaises(ImportError, test)
def test_ext_isfile(self):
''' The virtual module needs a valid __file__ attribute.
If not, the Google app engine development server crashes on windows.
'''
from bottle import ext
self.assertTrue(os.path.isfile(ext.__file__))
if __name__ == '__main__': #pragma: no cover
unittest.main()
| gpl-2.0 |
Cisco-Talos/pyrebox | volatility/setup.py | 12 | 3606 | #!/usr/bin/env python
# Volatility
#
# Authors:
# AAron Walters <[email protected]>
# Mike Auty <[email protected]>
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import volatility.constants
import sys
import os
py2exe_available = True
try:
import py2exe #pylint: disable-msg=W0611,F0401
except ImportError:
py2exe_available = False
def find_files(topdirs, py = False):
"""Lists all python files under any topdir from the topdirs lists.
Returns an appropriate list for data_files,
with source and destination directories the same"""
ret = []
for topdir in topdirs:
for r, _ds, fs in os.walk(topdir):
ret.append((r, [ os.path.join(r, f) for f in fs if (f.endswith('.py') or not py)]))
return ret
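# Illustrative result of find_files (a hedged sketch; actual paths depend on
# the checkout):
#   find_files(['contrib'], py=True)
#   # -> [('contrib', ['contrib/foo.py', ...]), ('contrib/plugins', [...]), ...]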
opts = {}
opts['name'] = "volatility"
opts['version'] = volatility.constants.VERSION
opts['description'] = "Volatility -- Volatile memory framework"
opts['author'] = "AAron Walters"
opts['author_email'] = "[email protected]"
opts['url'] = "http://www.volatilityfoundation.org"
opts['license'] = "GPL"
opts['scripts'] = ["vol.py"]
opts['packages'] = ["volatility",
"volatility.win32",
"volatility.renderers",
"volatility.plugins",
"volatility.plugins.addrspaces",
"volatility.plugins.overlays",
"volatility.plugins.overlays.windows",
"volatility.plugins.overlays.linux",
"volatility.plugins.overlays.mac",
"volatility.plugins.gui",
"volatility.plugins.gui.vtypes",
"volatility.plugins.linux",
"volatility.plugins.registry",
"volatility.plugins.malware",
"volatility.plugins.mac"]
opts['data_files'] = find_files(['contrib'], py = True) + find_files(['tools'])
if py2exe_available:
py2exe_distdir = 'dist/py2exe'
opts['console'] = [{ 'script': 'vol.py',
'icon_resources': [(1, 'resources/volatility.ico')]
}]
# Optimize must be 1 for plugins that use docstring for the help value,
# otherwise the help gets optimized out
opts['options'] = {'py2exe':{'optimize': 1,
'dist_dir': py2exe_distdir,
'packages': opts['packages'] + ['socket', 'ctypes', 'Crypto.Cipher', 'urllib', 'distorm3', 'yara', 'xml.etree.ElementTree'],
# This, along with zipfile = None, ensures a single binary
'bundle_files': 1,
}
}
opts['zipfile'] = None
distrib = setup(**opts) #pylint: disable-msg=W0142
if 'py2exe' in sys.argv:
# Any py2exe specific files or things that need doing can go in here
pass
| gpl-2.0 |
lilmuck/lilmuck | plugin.video.szenestreams/default.py | 1 | 6874 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import urllib,urllib2,re,xbmcaddon,xbmcplugin,xbmcgui,xbmc,HTMLParser
from stream import *
htmlparser = HTMLParser.HTMLParser()
pluginhandle = int(sys.argv[1])
itemcnt = 0
baseurl = 'http://www.szene-streams.com'
settings = xbmcaddon.Addon(id='plugin.video.szene-streams')
maxitems = (int(settings.getSetting("items_per_page"))+1)*10
filterUnknownHoster = settings.getSetting("filterUnknownHoster") == 'true'
forceMovieViewMode = settings.getSetting("forceMovieViewMode") == 'true'
movieViewMode = str(settings.getSetting("movieViewMode"))
dbg = False
def CATEGORIES():
data = getUrl(baseurl)
cats = re.findall('<a[^>]*?class="CatInf"[^>]*?href="(.*?)"[^>]*?>.*?<div class="CatNumInf">(.*?)</div>[^<]*?<div[^>]*?class="CatNameInf">(.*?)</div>', data, re.S|re.I)
addDir('Letzte Updates', baseurl, 1, '', True)
addDir('Serien', baseurl + '/load', 0, '', True)
for (url, num, name) in cats:
if 'http:' not in url: url = baseurl + url
addDir(name + ' [COLOR=blue](' + num + ')[/COLOR]', url, 1, '', True)
xbmc.executebuiltin("Container.SetViewMode(400)")
def SERIES(url):
data = getUrl(url)
cats = re.findall('<a[^>]*?class="CatInf"[^>]*?href="(.*?)"[^>]*?>.*?<div class="CatNumInf">(.*?)</div>[^<]*?<div[^>]*?class="CatNameInf">(.*?)</div>', data, re.S|re.I)
addDir('Letzte Updates', baseurl + '/load/0-1', 1, '', True)
for (url, num, name) in cats:
if 'http:' not in url: url = baseurl + url
addDir(name + ' [COLOR=blue](' + num + ')[/COLOR]', url, 1, '', True)
xbmc.executebuiltin("Container.SetViewMode(400)")
def INDEX(url):
global itemcnt
nextPageUrl = re.sub('-[\d]+$', '', url)
print url
data = getUrl(url)
movies = re.findall('<div class="ImgWrapNews">[^<]*<a[^<]*<img[^>]*src="([^"]*.[jpg|png])"[^>]*alt="([^"]*)"[^>]*>.*?class="[^"]*entryLink[^"]*".*?href="([^"]*)"', data, re.S|re.I)
if movies:
for (image, title, url) in movies:
if 'http:' not in url: url = baseurl + url
addDir(clean(title), url, 2, image, True)
itemcnt = itemcnt + 1
nextPage = re.findall('<a class="swchItem"[^>]*onclick="spages\(\'(\d+)\'[^>]*?"[^>]*><span>»</span>', data, re.S)
if nextPage:
if itemcnt >= maxitems:
addDir('Weiter >>', nextPageUrl + '-' + nextPage[0], 1, '', True)
else:
INDEX(nextPageUrl + '-' + nextPage[0])
if forceMovieViewMode: xbmc.executebuiltin("Container.SetViewMode(" + movieViewMode + ")")
def VIDEOLINKS(url, image):
data = getUrl(url)
streams = []
raw = re.findall('(<fieldset[^>]*>[^<]*<legend>.*?</fieldset>)', data, re.S)
if raw:
for each in raw:
series = re.findall('<div class="spoiler"><font[^>]*><b[^>]*>(.+?)</b>(.*?)<input', each, re.S|re.I)
if not series: series = re.findall('<legend>(.+?)</legend>[^<]*<div class="spoiler">(.*?)<input', each, re.S|re.I)
if not series: series = re.findall('<legend>(.+?)</legend>.*?(<iframe.*?</iframe>|<a[^>]*href=".+"[^>]*>).*', each, re.S|re.I)
if series:
for ser in series:
for (s, n) in re.findall('<a[^>]*href="([^"]+)"[^>]*>([^<]*)<', each, re.S|re.I):
if dbg: print 'ser1'
if ser: n = clean(ser[1]) + ' ' + extractFilename(s)
n = clean(n) if n else extractFilename(s)
if n: streams += [(n, s)]
for s in re.findall('<iframe[^>]*src="([^"]*)"[^>]*>', each, re.S|re.I):
if dbg: print 'ser2'
if ser: n = clean(ser[1])
if not n: n = 'unknown'
if n: streams += [(n, s)]
elif re.match('.*?iframe.*?src.*', each, re.S|re.I):
if dbg: print 'nonser1'
streams += re.findall('<font[^>]*>.*?src=".*?/player/(.*?)\..{3}".*?<iframe.*?src=["|\'](.*?)["|\']', each, re.S|re.I)
else:
if dbg: print 'nonser2'
streams += re.findall('<font[^>]*>.*?src=".*?/player/(.*?)\..{3}".*?</font>.*?target="_blank" href=["|\'](.*?)["|\']', each, re.S|re.I)
if streams:
for (filename, stream) in streams:
hoster = get_stream_link().get_hostername(stream)
if filterUnknownHoster and hoster == 'Not Supported': continue
entry = '[COLOR=blue](' + hoster + ')[/COLOR] ' + filename
addLink(entry, clean(stream), 3, image)
def clean(s):
try: s = htmlparser.unescape(s)
except: print "could not unescape string '%s'"%(s)
s = re.sub('<[^>]*>', '', s)
s = s.replace('_', ' ')
s = re.sub('[ ]+', ' ', s)
for hit in set(re.findall("&#\d+;", s)):
try: s = s.replace(hit, unichr(int(hit[2:-1])))
except ValueError: pass
return s.strip('\n').strip()
def extractFilename(path):
path = re.sub('^.*/', '',clean(path)).replace('.html', '').replace('_', ' ')
return re.sub('\.[a-zA-Z]{3}', '', path)
def GETLINK(url):
stream_url = get_stream_link().get_stream(url)
if stream_url:
if re.match('^Error: ', stream_url, re.S|re.I):
xbmc.executebuiltin("XBMC.Notification(Fehler!, " + re.sub('^Error: ','',stream_url) + ", 4000)")
else:
listitem = xbmcgui.ListItem(path=stream_url)
return xbmcplugin.setResolvedUrl(pluginhandle, True, listitem)
def getUrl(url):
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
data = response.read()
response.close()
return data
def get_params():
param=[]
paramstring=sys.argv[2]
if len(paramstring)>=2:
params=sys.argv[2]
cleanedparams=params.replace('?','')
        if (cleanedparams[len(cleanedparams)-1]=='/'):
            cleanedparams=cleanedparams[0:len(cleanedparams)-1]
pairsofparams=cleanedparams.split('&')
param={}
for i in range(len(pairsofparams)):
splitparams={}
splitparams=pairsofparams[i].split('=')
if (len(splitparams))==2:
param[splitparams[0]]=splitparams[1]
return param
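# Add a playable video item that routes back into this plugin with the given mode.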
def addLink(name, url, mode, image):
u = sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)
liz = xbmcgui.ListItem(name, iconImage="DefaultVideo.png", thumbnailImage=image)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
liz.setProperty('IsPlayable', 'true')
return xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=u, listitem=liz)
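# Add a folder item carrying url, mode and thumbnail back into this plugin.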
def addDir(name, url, mode, image, is_folder=False):
u = sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&image="+urllib.quote_plus(image)
liz = xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=image)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
return xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=u, listitem=liz, isFolder=is_folder)
params = get_params()
url = mode = image = None
try: url = urllib.unquote_plus(params["url"])
except: pass
try: mode = int(params["mode"])
except: pass
try: image = urllib.unquote_plus(params["image"])
except: pass
if mode==None or url==None or len(url)<1: CATEGORIES()
elif mode==0: SERIES(url)
elif mode==1: INDEX(url)
elif mode==2: VIDEOLINKS(url, image)
elif mode==3: GETLINK(url)
xbmcplugin.endOfDirectory(int(sys.argv[1])) | gpl-2.0 |
tseaver/gcloud-python | videointelligence/nox.py | 1 | 2462 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import nox
@nox.session
def default(session):
return unit(session, 'default')
@nox.session
@nox.parametrize('py', ['2.7', '3.5', '3.6', '3.7'])
def unit(session, py):
"""Run the unit test suite."""
# Run unit tests against all supported versions of Python.
if py != 'default':
session.interpreter = 'python{}'.format(py)
# Set the virtualenv directory name.
session.virtualenv_dirname = 'unit-' + py
# Install all test dependencies, then install this package in-place.
session.install('pytest')
session.install('-e', '.')
# Run py.test against the unit tests.
session.run('py.test', '--quiet', os.path.join('tests', 'unit'))
# TODO: Fix generated system tests
#@nox.session
#@nox.parametrize('py', ['2.7', '3.7'])
#def system(session, py):
# """Run the system test suite."""
#
# # Sanity check: Only run system tests if the environment variable is set.
# if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''):
# session.skip('Credentials must be set via environment variable.')
#
# # Run unit tests against all supported versions of Python.
# session.interpreter = 'python{}'.format(py)
#
# # Set the virtualenv dirname.
# session.virtualenv_dirname = 'sys-' + py
#
# # Install all test dependencies, then install this package in-place.
# session.install('pytest')
# session.install('-e', '.')
#
# # Run py.test against the unit tests.
# session.run('py.test', '--quiet', os.path.join('tests', 'system'),
# *session.posargs)
@nox.session
def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
session.interpreter = 'python3.6'
session.install('docutils', 'pygments')
session.run('python', 'setup.py', 'check', '--restructuredtext',
'--strict')
| apache-2.0 |
andrewcmyers/tensorflow | tensorflow/python/kernel_tests/metrics_test.py | 28 | 139808 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for metrics."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import math
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import metrics
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
import tensorflow.python.ops.data_flow_grad # pylint: disable=unused-import
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
NAN = float('nan')
def _enqueue_vector(sess, queue, values, shape=None):
if not shape:
shape = (1, len(values))
dtype = queue.dtypes[0]
sess.run(
queue.enqueue(constant_op.constant(
values, dtype=dtype, shape=shape)))
def _binary_2d_label_to_2d_sparse_value(labels):
"""Convert dense 2D binary indicator to sparse ID.
Only 1 values in `labels` are included in result.
Args:
labels: Dense 2D binary indicator, shape [batch_size, num_classes].
Returns:
`SparseTensorValue` of shape [batch_size, num_classes], where num_classes
is the number of `1` values in each row of `labels`. Values are indices
of `1` values along the last dimension of `labels`.
"""
indices = []
values = []
batch = 0
for row in labels:
label = 0
xi = 0
for x in row:
if x == 1:
indices.append([batch, xi])
values.append(label)
xi += 1
else:
assert x == 0
label += 1
batch += 1
shape = [len(labels), len(labels[0])]
return sparse_tensor.SparseTensorValue(
np.array(indices, np.int64),
np.array(values, np.int64), np.array(shape, np.int64))
def _binary_2d_label_to_1d_sparse_value(labels):
"""Convert dense 2D binary indicator to sparse ID.
Only 1 values in `labels` are included in result.
Args:
labels: Dense 2D binary indicator, shape [batch_size, num_classes]. Each
row must contain exactly 1 `1` value.
Returns:
`SparseTensorValue` of shape [batch_size]. Values are indices of `1` values
along the last dimension of `labels`.
Raises:
ValueError: if there is not exactly 1 `1` value per row of `labels`.
"""
indices = []
values = []
batch = 0
for row in labels:
label = 0
xi = 0
for x in row:
if x == 1:
indices.append([batch])
values.append(label)
xi += 1
else:
assert x == 0
label += 1
batch += 1
if indices != [[i] for i in range(len(labels))]:
raise ValueError('Expected 1 label/example, got %s.' % indices)
shape = [len(labels)]
return sparse_tensor.SparseTensorValue(
np.array(indices, np.int64),
np.array(values, np.int64), np.array(shape, np.int64))
def _binary_3d_label_to_sparse_value(labels):
"""Convert dense 3D binary indicator tensor to sparse tensor.
Only 1 values in `labels` are included in result.
Args:
    labels: Dense 3D binary indicator tensor.
Returns:
`SparseTensorValue` whose values are indices along the last dimension of
`labels`.
"""
indices = []
values = []
for d0, labels_d0 in enumerate(labels):
for d1, labels_d1 in enumerate(labels_d0):
d2 = 0
for class_id, label in enumerate(labels_d1):
if label == 1:
values.append(class_id)
indices.append([d0, d1, d2])
d2 += 1
else:
assert label == 0
shape = [len(labels), len(labels[0]), len(labels[0][0])]
return sparse_tensor.SparseTensorValue(
np.array(indices, np.int64),
np.array(values, np.int64), np.array(shape, np.int64))
def _assert_nan(test_case, actual):
test_case.assertTrue(math.isnan(actual), 'Expected NAN, got %s.' % actual)
def _assert_local_variables(test_case, expected):
test_case.assertEquals(
set(expected), set(v.name for v in variables.local_variables()))
def _test_values(shape):
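  # For example, _test_values((2, 2)) is [[1., 2.], [3., 4.]]: a cumulative
  # sum of ones reshaped to the requested shape.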
return np.reshape(np.cumsum(np.ones(shape)), newshape=shape)
class MeanTest(test.TestCase):
def setUp(self):
ops.reset_default_graph()
def testVars(self):
metrics.mean(array_ops.ones([4, 3]))
_assert_local_variables(self, ('mean/count:0', 'mean/total:0'))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.mean(
array_ops.ones([4, 3]), metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.mean(
array_ops.ones([4, 3]), updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testBasic(self):
with self.test_session() as sess:
values_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
mean, update_op = metrics.mean(values)
sess.run(variables.local_variables_initializer())
for _ in range(4):
sess.run(update_op)
self.assertAlmostEqual(1.65, sess.run(mean), 5)
def testUpdateOpsReturnsCurrentValue(self):
with self.test_session() as sess:
values_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
mean, update_op = metrics.mean(values)
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0.5, sess.run(update_op), 5)
self.assertAlmostEqual(1.475, sess.run(update_op), 5)
self.assertAlmostEqual(12.4 / 6.0, sess.run(update_op), 5)
self.assertAlmostEqual(1.65, sess.run(update_op), 5)
self.assertAlmostEqual(1.65, sess.run(mean), 5)
def testUnweighted(self):
values = _test_values((3, 2, 4, 1))
mean_results = (
metrics.mean(values),
metrics.mean(values, weights=1.0),
metrics.mean(values, weights=np.ones((1, 1, 1))),
metrics.mean(values, weights=np.ones((1, 1, 1, 1))),
metrics.mean(values, weights=np.ones((1, 1, 1, 1, 1))),
metrics.mean(values, weights=np.ones((1, 1, 4))),
metrics.mean(values, weights=np.ones((1, 1, 4, 1))),
metrics.mean(values, weights=np.ones((1, 2, 1))),
metrics.mean(values, weights=np.ones((1, 2, 1, 1))),
metrics.mean(values, weights=np.ones((1, 2, 4))),
metrics.mean(values, weights=np.ones((1, 2, 4, 1))),
metrics.mean(values, weights=np.ones((3, 1, 1))),
metrics.mean(values, weights=np.ones((3, 1, 1, 1))),
metrics.mean(values, weights=np.ones((3, 1, 4))),
metrics.mean(values, weights=np.ones((3, 1, 4, 1))),
metrics.mean(values, weights=np.ones((3, 2, 1))),
metrics.mean(values, weights=np.ones((3, 2, 1, 1))),
metrics.mean(values, weights=np.ones((3, 2, 4))),
metrics.mean(values, weights=np.ones((3, 2, 4, 1))),
metrics.mean(values, weights=np.ones((3, 2, 4, 1, 1))),)
expected = np.mean(values)
with self.test_session():
variables.local_variables_initializer().run()
for mean_result in mean_results:
mean, update_op = mean_result
self.assertAlmostEqual(expected, update_op.eval())
self.assertAlmostEqual(expected, mean.eval())
def _test_3d_weighted(self, values, weights):
expected = (
np.sum(np.multiply(weights, values)) /
np.sum(np.multiply(weights, np.ones_like(values)))
)
mean, update_op = metrics.mean(values, weights=weights)
with self.test_session():
variables.local_variables_initializer().run()
self.assertAlmostEqual(expected, update_op.eval(), places=5)
self.assertAlmostEqual(expected, mean.eval(), places=5)
def test1x1x1Weighted(self):
self._test_3d_weighted(
_test_values((3, 2, 4)),
weights=np.asarray((5,)).reshape((1, 1, 1)))
def test1x1xNWeighted(self):
self._test_3d_weighted(
_test_values((3, 2, 4)),
weights=np.asarray((5, 7, 11, 3)).reshape((1, 1, 4)))
def test1xNx1Weighted(self):
self._test_3d_weighted(
_test_values((3, 2, 4)),
weights=np.asarray((5, 11)).reshape((1, 2, 1)))
def test1xNxNWeighted(self):
self._test_3d_weighted(
_test_values((3, 2, 4)),
weights=np.asarray((5, 7, 11, 3, 2, 13, 7, 5)).reshape((1, 2, 4)))
def testNx1x1Weighted(self):
self._test_3d_weighted(
_test_values((3, 2, 4)),
weights=np.asarray((5, 7, 11)).reshape((3, 1, 1)))
def testNx1xNWeighted(self):
self._test_3d_weighted(
_test_values((3, 2, 4)),
weights=np.asarray((
5, 7, 11, 3, 2, 12, 7, 5, 2, 17, 11, 3)).reshape((3, 1, 4)))
def testNxNxNWeighted(self):
self._test_3d_weighted(
_test_values((3, 2, 4)),
weights=np.asarray((
5, 7, 11, 3, 2, 12, 7, 5, 2, 17, 11, 3,
2, 17, 11, 3, 5, 7, 11, 3, 2, 12, 7, 5)).reshape((3, 2, 4)))
def testInvalidWeights(self):
values_placeholder = array_ops.placeholder(dtype=dtypes_lib.float32)
values = _test_values((3, 2, 4, 1))
invalid_weights = (
(1,),
(1, 1),
(3, 2),
(2, 4, 1),
(4, 2, 4, 1),
(3, 3, 4, 1),
(3, 2, 5, 1),
(3, 2, 4, 2),
(1, 1, 1, 1, 1))
expected_error_msg = 'weights can not be broadcast to values'
for invalid_weight in invalid_weights:
# Static shapes.
with self.assertRaisesRegexp(ValueError, expected_error_msg):
metrics.mean(values, invalid_weight)
# Dynamic shapes.
with self.assertRaisesRegexp(errors_impl.OpError, expected_error_msg):
with self.test_session():
_, update_op = metrics.mean(values_placeholder, invalid_weight)
variables.local_variables_initializer().run()
update_op.eval(feed_dict={values_placeholder: values})
class MeanTensorTest(test.TestCase):
def setUp(self):
ops.reset_default_graph()
def testVars(self):
metrics.mean_tensor(array_ops.ones([4, 3]))
_assert_local_variables(self, ('mean/total_tensor:0',
'mean/count_tensor:0'))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.mean_tensor(
array_ops.ones([4, 3]), metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.mean_tensor(
array_ops.ones([4, 3]), updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testBasic(self):
with self.test_session() as sess:
values_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
mean, update_op = metrics.mean_tensor(values)
sess.run(variables.local_variables_initializer())
for _ in range(4):
sess.run(update_op)
self.assertAllClose([[-0.9 / 4., 3.525]], sess.run(mean))
def testMultiDimensional(self):
with self.test_session() as sess:
values_queue = data_flow_ops.FIFOQueue(
2, dtypes=dtypes_lib.float32, shapes=(2, 2, 2))
_enqueue_vector(
sess,
values_queue, [[[1, 2], [1, 2]], [[1, 2], [1, 2]]],
shape=(2, 2, 2))
_enqueue_vector(
sess,
values_queue, [[[1, 2], [1, 2]], [[3, 4], [9, 10]]],
shape=(2, 2, 2))
values = values_queue.dequeue()
mean, update_op = metrics.mean_tensor(values)
sess.run(variables.local_variables_initializer())
for _ in range(2):
sess.run(update_op)
self.assertAllClose([[[1, 2], [1, 2]], [[2, 3], [5, 6]]], sess.run(mean))
def testUpdateOpsReturnsCurrentValue(self):
with self.test_session() as sess:
values_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
mean, update_op = metrics.mean_tensor(values)
sess.run(variables.local_variables_initializer())
self.assertAllClose([[0, 1]], sess.run(update_op), 5)
self.assertAllClose([[-2.1, 5.05]], sess.run(update_op), 5)
self.assertAllClose([[2.3 / 3., 10.1 / 3.]], sess.run(update_op), 5)
self.assertAllClose([[-0.9 / 4., 3.525]], sess.run(update_op), 5)
self.assertAllClose([[-0.9 / 4., 3.525]], sess.run(mean), 5)
def testWeighted1d(self):
with self.test_session() as sess:
# Create the queue that populates the values.
values_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
# Create the queue that populates the weights.
weights_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.float32, shapes=(1, 1))
_enqueue_vector(sess, weights_queue, [[1]])
_enqueue_vector(sess, weights_queue, [[0]])
_enqueue_vector(sess, weights_queue, [[1]])
_enqueue_vector(sess, weights_queue, [[0]])
weights = weights_queue.dequeue()
mean, update_op = metrics.mean_tensor(values, weights)
sess.run(variables.local_variables_initializer())
for _ in range(4):
sess.run(update_op)
self.assertAllClose([[3.25, 0.5]], sess.run(mean), 5)
def testWeighted2d_1(self):
with self.test_session() as sess:
# Create the queue that populates the values.
values_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
# Create the queue that populates the weights.
weights_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.float32, shapes=(1, 2))
_enqueue_vector(sess, weights_queue, [1, 1])
_enqueue_vector(sess, weights_queue, [1, 0])
_enqueue_vector(sess, weights_queue, [0, 1])
_enqueue_vector(sess, weights_queue, [0, 0])
weights = weights_queue.dequeue()
mean, update_op = metrics.mean_tensor(values, weights)
sess.run(variables.local_variables_initializer())
for _ in range(4):
sess.run(update_op)
self.assertAllClose([[-2.1, 0.5]], sess.run(mean), 5)
def testWeighted2d_2(self):
with self.test_session() as sess:
# Create the queue that populates the values.
values_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
# Create the queue that populates the weights.
weights_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.float32, shapes=(1, 2))
_enqueue_vector(sess, weights_queue, [0, 1])
_enqueue_vector(sess, weights_queue, [0, 0])
_enqueue_vector(sess, weights_queue, [0, 1])
_enqueue_vector(sess, weights_queue, [0, 0])
weights = weights_queue.dequeue()
mean, update_op = metrics.mean_tensor(values, weights)
sess.run(variables.local_variables_initializer())
for _ in range(4):
sess.run(update_op)
self.assertAllClose([[0, 0.5]], sess.run(mean), 5)
class AccuracyTest(test.TestCase):
def setUp(self):
ops.reset_default_graph()
def testVars(self):
metrics.accuracy(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
name='my_accuracy')
_assert_local_variables(self, ('my_accuracy/count:0',
'my_accuracy/total:0'))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.accuracy(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.accuracy(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testPredictionsAndLabelsOfDifferentSizeRaisesValueError(self):
predictions = array_ops.ones((10, 3))
labels = array_ops.ones((10, 4))
with self.assertRaises(ValueError):
metrics.accuracy(labels, predictions)
def testPredictionsAndWeightsOfDifferentSizeRaisesValueError(self):
predictions = array_ops.ones((10, 3))
labels = array_ops.ones((10, 3))
weights = array_ops.ones((9, 3))
with self.assertRaises(ValueError):
metrics.accuracy(labels, predictions, weights)
def testValueTensorIsIdempotent(self):
predictions = random_ops.random_uniform(
(10, 3), maxval=3, dtype=dtypes_lib.int64, seed=1)
labels = random_ops.random_uniform(
(10, 3), maxval=3, dtype=dtypes_lib.int64, seed=1)
accuracy, update_op = metrics.accuracy(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_accuracy = accuracy.eval()
for _ in range(10):
self.assertEqual(initial_accuracy, accuracy.eval())
def testMultipleUpdates(self):
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.float32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [2])
_enqueue_vector(sess, preds_queue, [1])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.float32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [2])
labels = labels_queue.dequeue()
accuracy, update_op = metrics.accuracy(labels, predictions)
sess.run(variables.local_variables_initializer())
for _ in xrange(3):
sess.run(update_op)
self.assertEqual(0.5, sess.run(update_op))
self.assertEqual(0.5, accuracy.eval())
def testEffectivelyEquivalentSizes(self):
predictions = array_ops.ones((40, 1))
labels = array_ops.ones((40,))
with self.test_session() as sess:
accuracy, update_op = metrics.accuracy(labels, predictions)
sess.run(variables.local_variables_initializer())
self.assertEqual(1.0, update_op.eval())
self.assertEqual(1.0, accuracy.eval())
def testEffectivelyEquivalentSizesWithScalarWeight(self):
predictions = array_ops.ones((40, 1))
labels = array_ops.ones((40,))
with self.test_session() as sess:
accuracy, update_op = metrics.accuracy(labels, predictions, weights=2.0)
sess.run(variables.local_variables_initializer())
self.assertEqual(1.0, update_op.eval())
self.assertEqual(1.0, accuracy.eval())
def testEffectivelyEquivalentSizesWithStaticShapedWeight(self):
predictions = ops.convert_to_tensor([1, 1, 1]) # shape 3,
labels = array_ops.expand_dims(ops.convert_to_tensor([1, 0, 0]),
1) # shape 3, 1
weights = array_ops.expand_dims(ops.convert_to_tensor([100, 1, 1]),
1) # shape 3, 1
with self.test_session() as sess:
accuracy, update_op = metrics.accuracy(labels, predictions, weights)
sess.run(variables.local_variables_initializer())
# if streaming_accuracy does not flatten the weight, accuracy would be
# 0.33333334 due to an intended broadcast of weight. Due to flattening,
# it will be higher than .95
self.assertGreater(update_op.eval(), .95)
self.assertGreater(accuracy.eval(), .95)
def testEffectivelyEquivalentSizesWithDynamicallyShapedWeight(self):
predictions = ops.convert_to_tensor([1, 1, 1]) # shape 3,
labels = array_ops.expand_dims(ops.convert_to_tensor([1, 0, 0]),
1) # shape 3, 1
weights = [[100], [1], [1]] # shape 3, 1
weights_placeholder = array_ops.placeholder(
dtype=dtypes_lib.int32, name='weights')
feed_dict = {weights_placeholder: weights}
with self.test_session() as sess:
accuracy, update_op = metrics.accuracy(labels, predictions,
weights_placeholder)
sess.run(variables.local_variables_initializer())
# if streaming_accuracy does not flatten the weight, accuracy would be
# 0.33333334 due to an intended broadcast of weight. Due to flattening,
# it will be higher than .95
self.assertGreater(update_op.eval(feed_dict=feed_dict), .95)
self.assertGreater(accuracy.eval(feed_dict=feed_dict), .95)
def testMultipleUpdatesWithWeightedValues(self):
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.float32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [2])
_enqueue_vector(sess, preds_queue, [1])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.float32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [2])
labels = labels_queue.dequeue()
# Create the queue that populates the weights.
weights_queue = data_flow_ops.FIFOQueue(
4, dtypes=dtypes_lib.int64, shapes=(1, 1))
_enqueue_vector(sess, weights_queue, [1])
_enqueue_vector(sess, weights_queue, [1])
_enqueue_vector(sess, weights_queue, [0])
_enqueue_vector(sess, weights_queue, [0])
weights = weights_queue.dequeue()
accuracy, update_op = metrics.accuracy(labels, predictions, weights)
sess.run(variables.local_variables_initializer())
for _ in xrange(3):
sess.run(update_op)
self.assertEqual(1.0, sess.run(update_op))
self.assertEqual(1.0, accuracy.eval())
class PrecisionTest(test.TestCase):
def setUp(self):
np.random.seed(1)
ops.reset_default_graph()
def testVars(self):
metrics.precision(
predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)))
_assert_local_variables(self, ('precision/false_positives/count:0',
'precision/true_positives/count:0'))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.precision(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.precision(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = random_ops.random_uniform(
(10, 3), maxval=1, dtype=dtypes_lib.int64, seed=1)
labels = random_ops.random_uniform(
(10, 3), maxval=1, dtype=dtypes_lib.int64, seed=1)
precision, update_op = metrics.precision(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_precision = precision.eval()
for _ in range(10):
self.assertEqual(initial_precision, precision.eval())
def testAllCorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
predictions = constant_op.constant(inputs)
labels = constant_op.constant(inputs)
precision, update_op = metrics.precision(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(1, sess.run(update_op))
self.assertAlmostEqual(1, precision.eval())
def testSomeCorrect_multipleInputDtypes(self):
for dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32):
predictions = math_ops.cast(
constant_op.constant([1, 0, 1, 0], shape=(1, 4)), dtype=dtype)
labels = math_ops.cast(
constant_op.constant([0, 1, 1, 0], shape=(1, 4)), dtype=dtype)
precision, update_op = metrics.precision(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0.5, update_op.eval())
self.assertAlmostEqual(0.5, precision.eval())
def testWeighted1d(self):
predictions = constant_op.constant([[1, 0, 1, 0], [1, 0, 1, 0]])
labels = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
precision, update_op = metrics.precision(
labels, predictions, weights=constant_op.constant([[2], [5]]))
with self.test_session():
variables.local_variables_initializer().run()
weighted_tp = 2.0 + 5.0
weighted_positives = (2.0 + 2.0) + (5.0 + 5.0)
expected_precision = weighted_tp / weighted_positives
self.assertAlmostEqual(expected_precision, update_op.eval())
self.assertAlmostEqual(expected_precision, precision.eval())
def testWeightedScalar_placeholders(self):
predictions = array_ops.placeholder(dtype=dtypes_lib.float32)
labels = array_ops.placeholder(dtype=dtypes_lib.float32)
feed_dict = {
predictions: ((1, 0, 1, 0), (1, 0, 1, 0)),
labels: ((0, 1, 1, 0), (1, 0, 0, 1))
}
precision, update_op = metrics.precision(labels, predictions, weights=2)
with self.test_session():
variables.local_variables_initializer().run()
weighted_tp = 2.0 + 2.0
weighted_positives = (2.0 + 2.0) + (2.0 + 2.0)
expected_precision = weighted_tp / weighted_positives
self.assertAlmostEqual(
expected_precision, update_op.eval(feed_dict=feed_dict))
self.assertAlmostEqual(
expected_precision, precision.eval(feed_dict=feed_dict))
def testWeighted1d_placeholders(self):
predictions = array_ops.placeholder(dtype=dtypes_lib.float32)
labels = array_ops.placeholder(dtype=dtypes_lib.float32)
feed_dict = {
predictions: ((1, 0, 1, 0), (1, 0, 1, 0)),
labels: ((0, 1, 1, 0), (1, 0, 0, 1))
}
precision, update_op = metrics.precision(
labels, predictions, weights=constant_op.constant([[2], [5]]))
with self.test_session():
variables.local_variables_initializer().run()
weighted_tp = 2.0 + 5.0
weighted_positives = (2.0 + 2.0) + (5.0 + 5.0)
expected_precision = weighted_tp / weighted_positives
self.assertAlmostEqual(
expected_precision, update_op.eval(feed_dict=feed_dict))
self.assertAlmostEqual(
expected_precision, precision.eval(feed_dict=feed_dict))
def testWeighted2d(self):
predictions = constant_op.constant([[1, 0, 1, 0], [1, 0, 1, 0]])
labels = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
precision, update_op = metrics.precision(
labels,
predictions,
weights=constant_op.constant([[1, 2, 3, 4], [4, 3, 2, 1]]))
with self.test_session():
variables.local_variables_initializer().run()
weighted_tp = 3.0 + 4.0
weighted_positives = (1.0 + 3.0) + (4.0 + 2.0)
expected_precision = weighted_tp / weighted_positives
self.assertAlmostEqual(expected_precision, update_op.eval())
self.assertAlmostEqual(expected_precision, precision.eval())
def testWeighted2d_placeholders(self):
predictions = array_ops.placeholder(dtype=dtypes_lib.float32)
labels = array_ops.placeholder(dtype=dtypes_lib.float32)
feed_dict = {
predictions: ((1, 0, 1, 0), (1, 0, 1, 0)),
labels: ((0, 1, 1, 0), (1, 0, 0, 1))
}
precision, update_op = metrics.precision(
labels,
predictions,
weights=constant_op.constant([[1, 2, 3, 4], [4, 3, 2, 1]]))
with self.test_session():
variables.local_variables_initializer().run()
weighted_tp = 3.0 + 4.0
weighted_positives = (1.0 + 3.0) + (4.0 + 2.0)
expected_precision = weighted_tp / weighted_positives
self.assertAlmostEqual(
expected_precision, update_op.eval(feed_dict=feed_dict))
self.assertAlmostEqual(
expected_precision, precision.eval(feed_dict=feed_dict))
def testAllIncorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
predictions = constant_op.constant(inputs)
labels = constant_op.constant(1 - inputs)
precision, update_op = metrics.precision(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
sess.run(update_op)
self.assertAlmostEqual(0, precision.eval())
def testZeroTrueAndFalsePositivesGivesZeroPrecision(self):
predictions = constant_op.constant([0, 0, 0, 0])
labels = constant_op.constant([0, 0, 0, 0])
precision, update_op = metrics.precision(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
sess.run(update_op)
self.assertEqual(0.0, precision.eval())
class RecallTest(test.TestCase):
def setUp(self):
np.random.seed(1)
ops.reset_default_graph()
def testVars(self):
metrics.recall(
predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)))
_assert_local_variables(self, ('recall/false_negatives/count:0',
'recall/true_positives/count:0'))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.recall(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.recall(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = random_ops.random_uniform(
(10, 3), maxval=1, dtype=dtypes_lib.int64, seed=1)
labels = random_ops.random_uniform(
(10, 3), maxval=1, dtype=dtypes_lib.int64, seed=1)
recall, update_op = metrics.recall(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_recall = recall.eval()
for _ in range(10):
self.assertEqual(initial_recall, recall.eval())
def testAllCorrect(self):
np_inputs = np.random.randint(0, 2, size=(100, 1))
predictions = constant_op.constant(np_inputs)
labels = constant_op.constant(np_inputs)
recall, update_op = metrics.recall(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
sess.run(update_op)
self.assertEqual(1, recall.eval())
def testSomeCorrect_multipleInputDtypes(self):
for dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32):
predictions = math_ops.cast(
constant_op.constant([1, 0, 1, 0], shape=(1, 4)), dtype=dtype)
labels = math_ops.cast(
constant_op.constant([0, 1, 1, 0], shape=(1, 4)), dtype=dtype)
recall, update_op = metrics.recall(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0.5, update_op.eval())
self.assertAlmostEqual(0.5, recall.eval())
def testWeighted1d(self):
predictions = constant_op.constant([[1, 0, 1, 0], [0, 1, 0, 1]])
labels = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
weights = constant_op.constant([[2], [5]])
recall, update_op = metrics.recall(labels, predictions, weights=weights)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
weighted_tp = 2.0 + 5.0
weighted_t = (2.0 + 2.0) + (5.0 + 5.0)
      expected_recall = weighted_tp / weighted_t
      self.assertAlmostEqual(expected_recall, update_op.eval())
      self.assertAlmostEqual(expected_recall, recall.eval())
def testWeighted2d(self):
predictions = constant_op.constant([[1, 0, 1, 0], [0, 1, 0, 1]])
labels = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
weights = constant_op.constant([[1, 2, 3, 4], [4, 3, 2, 1]])
recall, update_op = metrics.recall(labels, predictions, weights=weights)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
weighted_tp = 3.0 + 1.0
weighted_t = (2.0 + 3.0) + (4.0 + 1.0)
      expected_recall = weighted_tp / weighted_t
      self.assertAlmostEqual(expected_recall, update_op.eval())
      self.assertAlmostEqual(expected_recall, recall.eval())
def testAllIncorrect(self):
np_inputs = np.random.randint(0, 2, size=(100, 1))
predictions = constant_op.constant(np_inputs)
labels = constant_op.constant(1 - np_inputs)
recall, update_op = metrics.recall(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
sess.run(update_op)
self.assertEqual(0, recall.eval())
def testZeroTruePositivesAndFalseNegativesGivesZeroRecall(self):
predictions = array_ops.zeros((1, 4))
labels = array_ops.zeros((1, 4))
recall, update_op = metrics.recall(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
sess.run(update_op)
self.assertEqual(0, recall.eval())
class AUCTest(test.TestCase):
def setUp(self):
np.random.seed(1)
ops.reset_default_graph()
def testVars(self):
metrics.auc(predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)))
_assert_local_variables(self,
('auc/true_positives:0', 'auc/false_negatives:0',
'auc/false_positives:0', 'auc/true_negatives:0'))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.auc(predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.auc(predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = random_ops.random_uniform(
(10, 3), maxval=1, dtype=dtypes_lib.float32, seed=1)
labels = random_ops.random_uniform(
(10, 3), maxval=1, dtype=dtypes_lib.int64, seed=1)
auc, update_op = metrics.auc(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_auc = auc.eval()
for _ in range(10):
self.assertAlmostEqual(initial_auc, auc.eval(), 5)
def testAllCorrect(self):
self.allCorrectAsExpected('ROC')
def allCorrectAsExpected(self, curve):
inputs = np.random.randint(0, 2, size=(100, 1))
with self.test_session() as sess:
predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
labels = constant_op.constant(inputs)
auc, update_op = metrics.auc(labels, predictions, curve=curve)
sess.run(variables.local_variables_initializer())
self.assertEqual(1, sess.run(update_op))
self.assertEqual(1, auc.eval())
def testSomeCorrect_multipleLabelDtypes(self):
with self.test_session() as sess:
for label_dtype in (
dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32):
predictions = constant_op.constant(
[1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
labels = math_ops.cast(
constant_op.constant([0, 1, 1, 0], shape=(1, 4)), dtype=label_dtype)
auc, update_op = metrics.auc(labels, predictions)
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0.5, sess.run(update_op))
self.assertAlmostEqual(0.5, auc.eval())
def testWeighted1d(self):
with self.test_session() as sess:
predictions = constant_op.constant(
[1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
labels = constant_op.constant([0, 1, 1, 0], shape=(1, 4))
weights = constant_op.constant([2], shape=(1, 1))
auc, update_op = metrics.auc(labels, predictions, weights=weights)
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0.5, sess.run(update_op), 5)
self.assertAlmostEqual(0.5, auc.eval(), 5)
def testWeighted2d(self):
with self.test_session() as sess:
predictions = constant_op.constant(
[1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
labels = constant_op.constant([0, 1, 1, 0], shape=(1, 4))
weights = constant_op.constant([1, 2, 3, 4], shape=(1, 4))
auc, update_op = metrics.auc(labels, predictions, weights=weights)
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0.7, sess.run(update_op), 5)
self.assertAlmostEqual(0.7, auc.eval(), 5)
def testAUCPRSpecialCase(self):
with self.test_session() as sess:
predictions = constant_op.constant(
[0.1, 0.4, 0.35, 0.8], shape=(1, 4), dtype=dtypes_lib.float32)
labels = constant_op.constant([0, 0, 1, 1], shape=(1, 4))
auc, update_op = metrics.auc(labels, predictions, curve='PR')
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0.79166, sess.run(update_op), delta=1e-3)
self.assertAlmostEqual(0.79166, auc.eval(), delta=1e-3)
def testAnotherAUCPRSpecialCase(self):
with self.test_session() as sess:
predictions = constant_op.constant(
[0.1, 0.4, 0.35, 0.8, 0.1, 0.135, 0.81],
shape=(1, 7),
dtype=dtypes_lib.float32)
labels = constant_op.constant([0, 0, 1, 0, 1, 0, 1], shape=(1, 7))
auc, update_op = metrics.auc(labels, predictions, curve='PR')
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0.610317, sess.run(update_op), delta=1e-3)
self.assertAlmostEqual(0.610317, auc.eval(), delta=1e-3)
def testThirdAUCPRSpecialCase(self):
with self.test_session() as sess:
predictions = constant_op.constant(
[0.0, 0.1, 0.2, 0.33, 0.3, 0.4, 0.5],
shape=(1, 7),
dtype=dtypes_lib.float32)
labels = constant_op.constant([0, 0, 0, 0, 1, 1, 1], shape=(1, 7))
auc, update_op = metrics.auc(labels, predictions, curve='PR')
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0.90277, sess.run(update_op), delta=1e-3)
self.assertAlmostEqual(0.90277, auc.eval(), delta=1e-3)
def testAllIncorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
with self.test_session() as sess:
predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
labels = constant_op.constant(1 - inputs, dtype=dtypes_lib.float32)
auc, update_op = metrics.auc(labels, predictions)
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0, sess.run(update_op))
self.assertAlmostEqual(0, auc.eval())
def testZeroTruePositivesAndFalseNegativesGivesOneAUC(self):
with self.test_session() as sess:
predictions = array_ops.zeros([4], dtype=dtypes_lib.float32)
labels = array_ops.zeros([4])
auc, update_op = metrics.auc(labels, predictions)
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(1, sess.run(update_op), 6)
self.assertAlmostEqual(1, auc.eval(), 6)
def testRecallOneAndPrecisionOneGivesOnePRAUC(self):
with self.test_session() as sess:
predictions = array_ops.ones([4], dtype=dtypes_lib.float32)
labels = array_ops.ones([4])
auc, update_op = metrics.auc(labels, predictions, curve='PR')
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(1, sess.run(update_op), 6)
self.assertAlmostEqual(1, auc.eval(), 6)
def np_auc(self, predictions, labels, weights):
"""Computes the AUC explicitly using Numpy.
Args:
predictions: an ndarray with shape [N].
labels: an ndarray with shape [N].
weights: an ndarray with shape [N].
Returns:
the area under the ROC curve.
"""
if weights is None:
weights = np.ones(np.size(predictions))
is_positive = labels > 0
num_positives = np.sum(weights[is_positive])
num_negatives = np.sum(weights[~is_positive])
# Sort descending:
inds = np.argsort(-predictions)
sorted_labels = labels[inds]
sorted_weights = weights[inds]
is_positive = sorted_labels > 0
tp = np.cumsum(sorted_weights * is_positive) / num_positives
return np.sum((sorted_weights * tp)[~is_positive]) / num_negatives
def testWithMultipleUpdates(self):
num_samples = 1000
batch_size = 10
num_batches = int(num_samples / batch_size)
# Create the labels and data.
labels = np.random.randint(0, 2, size=num_samples)
noise = np.random.normal(0.0, scale=0.2, size=num_samples)
predictions = 0.4 + 0.2 * labels + noise
predictions[predictions > 1] = 1
predictions[predictions < 0] = 0
def _enqueue_as_batches(x, enqueue_ops):
x_batches = x.astype(np.float32).reshape((num_batches, batch_size))
x_queue = data_flow_ops.FIFOQueue(
num_batches, dtypes=dtypes_lib.float32, shapes=(batch_size,))
for i in range(num_batches):
enqueue_ops[i].append(x_queue.enqueue(x_batches[i, :]))
return x_queue.dequeue()
for weights in (None, np.ones(num_samples), np.random.exponential(
scale=1.0, size=num_samples)):
expected_auc = self.np_auc(predictions, labels, weights)
with self.test_session() as sess:
enqueue_ops = [[] for i in range(num_batches)]
tf_predictions = _enqueue_as_batches(predictions, enqueue_ops)
tf_labels = _enqueue_as_batches(labels, enqueue_ops)
tf_weights = (_enqueue_as_batches(weights, enqueue_ops) if
weights is not None else None)
for i in range(num_batches):
sess.run(enqueue_ops[i])
auc, update_op = metrics.auc(tf_labels,
tf_predictions,
curve='ROC',
num_thresholds=500,
weights=tf_weights)
sess.run(variables.local_variables_initializer())
for i in range(num_batches):
sess.run(update_op)
# Since this is only approximate, we can't expect a 6 digits match.
# Although with higher number of samples/thresholds we should see the
# accuracy improving
self.assertAlmostEqual(expected_auc, auc.eval(), 2)
class SpecificityAtSensitivityTest(test.TestCase):
def setUp(self):
np.random.seed(1)
ops.reset_default_graph()
def testVars(self):
metrics.specificity_at_sensitivity(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
sensitivity=0.7)
_assert_local_variables(self,
('specificity_at_sensitivity/true_positives:0',
'specificity_at_sensitivity/false_negatives:0',
'specificity_at_sensitivity/false_positives:0',
'specificity_at_sensitivity/true_negatives:0'))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.specificity_at_sensitivity(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
sensitivity=0.7,
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.specificity_at_sensitivity(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
sensitivity=0.7,
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = random_ops.random_uniform(
(10, 3), maxval=1, dtype=dtypes_lib.float32, seed=1)
labels = random_ops.random_uniform(
(10, 3), maxval=2, dtype=dtypes_lib.int64, seed=1)
specificity, update_op = metrics.specificity_at_sensitivity(
labels, predictions, sensitivity=0.7)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_specificity = specificity.eval()
for _ in range(10):
self.assertAlmostEqual(initial_specificity, specificity.eval(), 5)
def testAllCorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
labels = constant_op.constant(inputs)
specificity, update_op = metrics.specificity_at_sensitivity(
labels, predictions, sensitivity=0.7)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertEqual(1, sess.run(update_op))
self.assertEqual(1, specificity.eval())
def testSomeCorrectHighSensitivity(self):
predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0, 0.1, 0.45, 0.5, 0.8, 0.9]
labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
predictions = constant_op.constant(
predictions_values, dtype=dtypes_lib.float32)
labels = constant_op.constant(labels_values)
specificity, update_op = metrics.specificity_at_sensitivity(
labels, predictions, sensitivity=0.8)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(1.0, sess.run(update_op))
self.assertAlmostEqual(1.0, specificity.eval())
def testSomeCorrectLowSensitivity(self):
predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0, 0.1, 0.2, 0.2, 0.26, 0.26]
labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
predictions = constant_op.constant(
predictions_values, dtype=dtypes_lib.float32)
labels = constant_op.constant(labels_values)
specificity, update_op = metrics.specificity_at_sensitivity(
labels, predictions, sensitivity=0.4)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0.6, sess.run(update_op))
self.assertAlmostEqual(0.6, specificity.eval())
def testWeighted1d_multipleLabelDtypes(self):
for label_dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32):
predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0, 0.1, 0.2, 0.2, 0.26, 0.26]
labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
weights_values = [3]
predictions = constant_op.constant(
predictions_values, dtype=dtypes_lib.float32)
labels = math_ops.cast(labels_values, dtype=label_dtype)
weights = constant_op.constant(weights_values)
specificity, update_op = metrics.specificity_at_sensitivity(
labels, predictions, weights=weights, sensitivity=0.4)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0.6, sess.run(update_op))
self.assertAlmostEqual(0.6, specificity.eval())
def testWeighted2d(self):
predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0, 0.1, 0.2, 0.2, 0.26, 0.26]
labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
weights_values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
predictions = constant_op.constant(
predictions_values, dtype=dtypes_lib.float32)
labels = constant_op.constant(labels_values)
weights = constant_op.constant(weights_values)
specificity, update_op = metrics.specificity_at_sensitivity(
labels, predictions, weights=weights, sensitivity=0.4)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(8.0 / 15.0, sess.run(update_op))
self.assertAlmostEqual(8.0 / 15.0, specificity.eval())
class SensitivityAtSpecificityTest(test.TestCase):
def setUp(self):
np.random.seed(1)
ops.reset_default_graph()
def testVars(self):
metrics.sensitivity_at_specificity(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
specificity=0.7)
_assert_local_variables(self,
('sensitivity_at_specificity/true_positives:0',
'sensitivity_at_specificity/false_negatives:0',
'sensitivity_at_specificity/false_positives:0',
'sensitivity_at_specificity/true_negatives:0'))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.sensitivity_at_specificity(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
specificity=0.7,
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.sensitivity_at_specificity(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
specificity=0.7,
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = random_ops.random_uniform(
(10, 3), maxval=1, dtype=dtypes_lib.float32, seed=1)
labels = random_ops.random_uniform(
(10, 3), maxval=2, dtype=dtypes_lib.int64, seed=1)
sensitivity, update_op = metrics.sensitivity_at_specificity(
labels, predictions, specificity=0.7)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_sensitivity = sensitivity.eval()
for _ in range(10):
self.assertAlmostEqual(initial_sensitivity, sensitivity.eval(), 5)
def testAllCorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
labels = constant_op.constant(inputs)
specificity, update_op = metrics.sensitivity_at_specificity(
labels, predictions, specificity=0.7)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertEqual(1, sess.run(update_op))
self.assertEqual(1, specificity.eval())
def testSomeCorrectHighSpecificity(self):
predictions_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.1, 0.45, 0.5, 0.8, 0.9]
labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
predictions = constant_op.constant(
predictions_values, dtype=dtypes_lib.float32)
labels = constant_op.constant(labels_values)
specificity, update_op = metrics.sensitivity_at_specificity(
labels, predictions, specificity=0.8)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0.8, sess.run(update_op))
self.assertAlmostEqual(0.8, specificity.eval())
def testSomeCorrectLowSpecificity(self):
predictions_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.01, 0.02, 0.25, 0.26, 0.26]
labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
predictions = constant_op.constant(
predictions_values, dtype=dtypes_lib.float32)
labels = constant_op.constant(labels_values)
specificity, update_op = metrics.sensitivity_at_specificity(
labels, predictions, specificity=0.4)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0.6, sess.run(update_op))
self.assertAlmostEqual(0.6, specificity.eval())
def testWeighted_multipleLabelDtypes(self):
for label_dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32):
predictions_values = [
0.0, 0.1, 0.2, 0.3, 0.4, 0.01, 0.02, 0.25, 0.26, 0.26]
labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
weights_values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
predictions = constant_op.constant(
predictions_values, dtype=dtypes_lib.float32)
labels = math_ops.cast(labels_values, dtype=label_dtype)
weights = constant_op.constant(weights_values)
specificity, update_op = metrics.sensitivity_at_specificity(
labels, predictions, weights=weights, specificity=0.4)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(0.675, sess.run(update_op))
self.assertAlmostEqual(0.675, specificity.eval())
# TODO(nsilberman): Break this up into two sets of tests.
class PrecisionRecallThresholdsTest(test.TestCase):
def setUp(self):
np.random.seed(1)
ops.reset_default_graph()
def testVars(self):
metrics.precision_at_thresholds(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
thresholds=[0, 0.5, 1.0])
_assert_local_variables(self, (
'precision_at_thresholds/true_positives:0',
'precision_at_thresholds/false_positives:0',))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
prec, _ = metrics.precision_at_thresholds(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
thresholds=[0, 0.5, 1.0],
metrics_collections=[my_collection_name])
rec, _ = metrics.recall_at_thresholds(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
thresholds=[0, 0.5, 1.0],
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [prec, rec])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, precision_op = metrics.precision_at_thresholds(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
thresholds=[0, 0.5, 1.0],
updates_collections=[my_collection_name])
_, recall_op = metrics.recall_at_thresholds(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
thresholds=[0, 0.5, 1.0],
updates_collections=[my_collection_name])
self.assertListEqual(
ops.get_collection(my_collection_name), [precision_op, recall_op])
def testValueTensorIsIdempotent(self):
predictions = random_ops.random_uniform(
(10, 3), maxval=1, dtype=dtypes_lib.float32, seed=1)
labels = random_ops.random_uniform(
(10, 3), maxval=1, dtype=dtypes_lib.int64, seed=1)
thresholds = [0, 0.5, 1.0]
prec, prec_op = metrics.precision_at_thresholds(labels, predictions,
thresholds)
rec, rec_op = metrics.recall_at_thresholds(labels, predictions, thresholds)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates, then verify idempotency.
sess.run([prec_op, rec_op])
initial_prec = prec.eval()
initial_rec = rec.eval()
for _ in range(10):
sess.run([prec_op, rec_op])
self.assertAllClose(initial_prec, prec.eval())
self.assertAllClose(initial_rec, rec.eval())
# TODO(nsilberman): fix tests (passing but incorrect).
def testAllCorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
with self.test_session() as sess:
predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
labels = constant_op.constant(inputs)
thresholds = [0.5]
prec, prec_op = metrics.precision_at_thresholds(labels, predictions,
thresholds)
rec, rec_op = metrics.recall_at_thresholds(labels, predictions,
thresholds)
sess.run(variables.local_variables_initializer())
sess.run([prec_op, rec_op])
self.assertEqual(1, prec.eval())
self.assertEqual(1, rec.eval())
def testSomeCorrect_multipleLabelDtypes(self):
with self.test_session() as sess:
for label_dtype in (
dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32):
predictions = constant_op.constant(
[1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
labels = math_ops.cast(
constant_op.constant([0, 1, 1, 0], shape=(1, 4)), dtype=label_dtype)
thresholds = [0.5]
prec, prec_op = metrics.precision_at_thresholds(labels, predictions,
thresholds)
rec, rec_op = metrics.recall_at_thresholds(labels, predictions,
thresholds)
sess.run(variables.local_variables_initializer())
sess.run([prec_op, rec_op])
self.assertAlmostEqual(0.5, prec.eval())
self.assertAlmostEqual(0.5, rec.eval())
def testAllIncorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
with self.test_session() as sess:
predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
labels = constant_op.constant(1 - inputs, dtype=dtypes_lib.float32)
thresholds = [0.5]
prec, prec_op = metrics.precision_at_thresholds(labels, predictions,
thresholds)
rec, rec_op = metrics.recall_at_thresholds(labels, predictions,
thresholds)
sess.run(variables.local_variables_initializer())
sess.run([prec_op, rec_op])
self.assertAlmostEqual(0, prec.eval())
self.assertAlmostEqual(0, rec.eval())
def testWeights1d(self):
with self.test_session() as sess:
predictions = constant_op.constant(
[[1, 0], [1, 0]], shape=(2, 2), dtype=dtypes_lib.float32)
labels = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2))
weights = constant_op.constant(
[[0], [1]], shape=(2, 1), dtype=dtypes_lib.float32)
thresholds = [0.5, 1.1]
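      # Only the second example carries weight; at threshold 0.5 it contributes
      # a single true positive, so precision and recall are both 1 there, while
      # nothing exceeds the 1.1 threshold so both drop to 0.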
prec, prec_op = metrics.precision_at_thresholds(
labels, predictions, thresholds, weights=weights)
rec, rec_op = metrics.recall_at_thresholds(
labels, predictions, thresholds, weights=weights)
[prec_low, prec_high] = array_ops.split(
value=prec, num_or_size_splits=2, axis=0)
prec_low = array_ops.reshape(prec_low, shape=())
prec_high = array_ops.reshape(prec_high, shape=())
[rec_low, rec_high] = array_ops.split(
value=rec, num_or_size_splits=2, axis=0)
rec_low = array_ops.reshape(rec_low, shape=())
rec_high = array_ops.reshape(rec_high, shape=())
sess.run(variables.local_variables_initializer())
sess.run([prec_op, rec_op])
self.assertAlmostEqual(1.0, prec_low.eval(), places=5)
self.assertAlmostEqual(0.0, prec_high.eval(), places=5)
self.assertAlmostEqual(1.0, rec_low.eval(), places=5)
self.assertAlmostEqual(0.0, rec_high.eval(), places=5)
def testWeights2d(self):
with self.test_session() as sess:
predictions = constant_op.constant(
[[1, 0], [1, 0]], shape=(2, 2), dtype=dtypes_lib.float32)
labels = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2))
weights = constant_op.constant(
[[0, 0], [1, 1]], shape=(2, 2), dtype=dtypes_lib.float32)
thresholds = [0.5, 1.1]
prec, prec_op = metrics.precision_at_thresholds(
labels, predictions, thresholds, weights=weights)
rec, rec_op = metrics.recall_at_thresholds(
labels, predictions, thresholds, weights=weights)
[prec_low, prec_high] = array_ops.split(
value=prec, num_or_size_splits=2, axis=0)
prec_low = array_ops.reshape(prec_low, shape=())
prec_high = array_ops.reshape(prec_high, shape=())
[rec_low, rec_high] = array_ops.split(
value=rec, num_or_size_splits=2, axis=0)
rec_low = array_ops.reshape(rec_low, shape=())
rec_high = array_ops.reshape(rec_high, shape=())
sess.run(variables.local_variables_initializer())
sess.run([prec_op, rec_op])
self.assertAlmostEqual(1.0, prec_low.eval(), places=5)
self.assertAlmostEqual(0.0, prec_high.eval(), places=5)
self.assertAlmostEqual(1.0, rec_low.eval(), places=5)
self.assertAlmostEqual(0.0, rec_high.eval(), places=5)
def testExtremeThresholds(self):
with self.test_session() as sess:
predictions = constant_op.constant(
[1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
labels = constant_op.constant([0, 1, 1, 1], shape=(1, 4))
thresholds = [-1.0, 2.0] # lower/higher than any values
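      # With predictions [1, 0, 1, 0] and labels [0, 1, 1, 1]: at threshold
      # -1.0 every prediction is positive (tp=3, fp=1), giving precision 0.75
      # and recall 1.0; at threshold 2.0 nothing is positive, so both are 0.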
prec, prec_op = metrics.precision_at_thresholds(labels, predictions,
thresholds)
rec, rec_op = metrics.recall_at_thresholds(labels, predictions,
thresholds)
[prec_low, prec_high] = array_ops.split(
value=prec, num_or_size_splits=2, axis=0)
[rec_low, rec_high] = array_ops.split(
value=rec, num_or_size_splits=2, axis=0)
sess.run(variables.local_variables_initializer())
sess.run([prec_op, rec_op])
self.assertAlmostEqual(0.75, prec_low.eval())
self.assertAlmostEqual(0.0, prec_high.eval())
self.assertAlmostEqual(1.0, rec_low.eval())
self.assertAlmostEqual(0.0, rec_high.eval())
def testZeroLabelsPredictions(self):
with self.test_session() as sess:
predictions = array_ops.zeros([4], dtype=dtypes_lib.float32)
labels = array_ops.zeros([4])
thresholds = [0.5]
prec, prec_op = metrics.precision_at_thresholds(labels, predictions,
thresholds)
rec, rec_op = metrics.recall_at_thresholds(labels, predictions,
thresholds)
sess.run(variables.local_variables_initializer())
sess.run([prec_op, rec_op])
self.assertAlmostEqual(0, prec.eval(), 6)
self.assertAlmostEqual(0, rec.eval(), 6)
def testWithMultipleUpdates(self):
num_samples = 1000
batch_size = 10
num_batches = int(num_samples / batch_size)
# Create the labels and data.
labels = np.random.randint(0, 2, size=(num_samples, 1))
noise = np.random.normal(0.0, scale=0.2, size=(num_samples, 1))
predictions = 0.4 + 0.2 * labels + noise
predictions[predictions > 1] = 1
predictions[predictions < 0] = 0
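    # The synthetic scores center positives near 0.6 and negatives near 0.4, so
    # the 0.3 threshold marks most examples positive; the loop below recomputes
    # the exact tp/fp/fn counts used as reference values.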
thresholds = [0.3]
tp = 0
fp = 0
fn = 0
tn = 0
for i in range(num_samples):
if predictions[i] > thresholds[0]:
if labels[i] == 1:
tp += 1
else:
fp += 1
else:
if labels[i] == 1:
fn += 1
else:
tn += 1
epsilon = 1e-7
expected_prec = tp / (epsilon + tp + fp)
expected_rec = tp / (epsilon + tp + fn)
labels = labels.astype(np.float32)
predictions = predictions.astype(np.float32)
with self.test_session() as sess:
      # Reshape the data so it's easy to queue up:
predictions_batches = predictions.reshape((batch_size, num_batches))
labels_batches = labels.reshape((batch_size, num_batches))
# Enqueue the data:
predictions_queue = data_flow_ops.FIFOQueue(
num_batches, dtypes=dtypes_lib.float32, shapes=(batch_size,))
labels_queue = data_flow_ops.FIFOQueue(
num_batches, dtypes=dtypes_lib.float32, shapes=(batch_size,))
for i in range(int(num_batches)):
tf_prediction = constant_op.constant(predictions_batches[:, i])
tf_label = constant_op.constant(labels_batches[:, i])
sess.run([
predictions_queue.enqueue(tf_prediction),
labels_queue.enqueue(tf_label)
])
tf_predictions = predictions_queue.dequeue()
tf_labels = labels_queue.dequeue()
prec, prec_op = metrics.precision_at_thresholds(tf_labels, tf_predictions,
thresholds)
rec, rec_op = metrics.recall_at_thresholds(tf_labels, tf_predictions,
thresholds)
sess.run(variables.local_variables_initializer())
for _ in range(int(num_samples / batch_size)):
sess.run([prec_op, rec_op])
      # Since this is only approximate, we can't expect a 6-digit match.
      # Accuracy should improve as the number of samples and thresholds grows.
self.assertAlmostEqual(expected_prec, prec.eval(), 2)
self.assertAlmostEqual(expected_rec, rec.eval(), 2)
def _test_sparse_precision_at_k(predictions,
labels,
k,
expected,
class_id=None,
weights=None,
test_case=None):
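  # Builds sparse_precision_at_k in a fresh graph, checks that evaluating the
  # metric before local-variable initialization raises, then runs a single
  # update and asserts both tensors match `expected` (treating NaN specially).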
with ops.Graph().as_default() as g, test_case.test_session(g):
if weights is not None:
weights = constant_op.constant(weights, dtypes_lib.float32)
metric, update = metrics.sparse_precision_at_k(
predictions=constant_op.constant(predictions, dtypes_lib.float32),
labels=labels,
k=k,
class_id=class_id,
weights=weights)
# Fails without initialized vars.
test_case.assertRaises(errors_impl.OpError, metric.eval)
test_case.assertRaises(errors_impl.OpError, update.eval)
variables.variables_initializer(variables.local_variables()).run()
# Run per-step op and assert expected values.
if math.isnan(expected):
_assert_nan(test_case, update.eval())
_assert_nan(test_case, metric.eval())
else:
test_case.assertEqual(expected, update.eval())
test_case.assertEqual(expected, metric.eval())
def _test_sparse_average_precision_at_k(predictions,
labels,
k,
expected,
weights=None,
test_case=None):
with ops.Graph().as_default() as g, test_case.test_session(g):
if weights is not None:
weights = constant_op.constant(weights, dtypes_lib.float32)
predictions = constant_op.constant(predictions, dtypes_lib.float32)
metric, update = metrics.sparse_average_precision_at_k(
labels, predictions, k, weights=weights)
# Fails without initialized vars.
test_case.assertRaises(errors_impl.OpError, metric.eval)
test_case.assertRaises(errors_impl.OpError, update.eval)
variables.variables_initializer(variables.local_variables()).run()
# Run per-step op and assert expected values.
if math.isnan(expected):
_assert_nan(test_case, update.eval())
_assert_nan(test_case, metric.eval())
else:
test_case.assertAlmostEqual(expected, update.eval())
test_case.assertAlmostEqual(expected, metric.eval())
class SingleLabelSparsePrecisionTest(test.TestCase):
def setUp(self):
self._predictions = ((0.1, 0.3, 0.2, 0.4), (0.1, 0.2, 0.3, 0.4))
indicator_labels = ((0, 0, 0, 1), (0, 0, 1, 0))
class_labels = (3, 2)
# Sparse vs dense, and 1d vs 2d labels should all be handled the same.
self._labels = (
_binary_2d_label_to_1d_sparse_value(indicator_labels),
_binary_2d_label_to_2d_sparse_value(indicator_labels), np.array(
class_labels, dtype=np.int64), np.array(
[[class_id] for class_id in class_labels], dtype=np.int64))
self._test_sparse_precision_at_k = functools.partial(
_test_sparse_precision_at_k, test_case=self)
self._test_sparse_average_precision_at_k = functools.partial(
_test_sparse_average_precision_at_k, test_case=self)
def test_at_k1_nan(self):
for labels in self._labels:
# Classes 0,1,2 have 0 predictions, classes -1 and 4 are out of range.
for class_id in (-1, 0, 1, 2, 4):
self._test_sparse_precision_at_k(
self._predictions, labels, k=1, expected=NAN, class_id=class_id)
def test_at_k1(self):
for labels in self._labels:
# Class 3: 1 label, 2 predictions, 1 correct.
self._test_sparse_precision_at_k(
self._predictions, labels, k=1, expected=1.0 / 2, class_id=3)
# All classes: 2 labels, 2 predictions, 1 correct.
self._test_sparse_precision_at_k(
self._predictions, labels, k=1, expected=1.0 / 2)
class MultiLabelSparsePrecisionTest(test.TestCase):
def setUp(self):
self._test_sparse_precision_at_k = functools.partial(
_test_sparse_precision_at_k, test_case=self)
self._test_sparse_average_precision_at_k = functools.partial(
_test_sparse_average_precision_at_k, test_case=self)
def test_average_precision(self):
# Example 1.
# Matches example here:
# fastml.com/what-you-wanted-to-know-about-mean-average-precision
labels_ex1 = (0, 1, 2, 3, 4)
labels = np.array([labels_ex1], dtype=np.int64)
predictions_ex1 = (0.2, 0.1, 0.0, 0.4, 0.0, 0.5, 0.3)
predictions = (predictions_ex1,)
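    # Ranked by score, the top-4 predicted classes are 5, 3, 6 and 0; classes 3
    # and 0 are relevant, so precision@k is 0/1, 1/2, 1/3, 2/4 and average
    # precision accumulates the precision at each relevant rank divided by k.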
precision_ex1 = (0.0 / 1, 1.0 / 2, 1.0 / 3, 2.0 / 4)
avg_precision_ex1 = (0.0 / 1, precision_ex1[1] / 2, precision_ex1[1] / 3,
(precision_ex1[1] + precision_ex1[3]) / 4)
for i in xrange(4):
k = i + 1
self._test_sparse_precision_at_k(
predictions, labels, k, expected=precision_ex1[i])
self._test_sparse_average_precision_at_k(
predictions, labels, k, expected=avg_precision_ex1[i])
# Example 2.
labels_ex2 = (0, 2, 4, 5, 6)
labels = np.array([labels_ex2], dtype=np.int64)
predictions_ex2 = (0.3, 0.5, 0.0, 0.4, 0.0, 0.1, 0.2)
predictions = (predictions_ex2,)
precision_ex2 = (0.0 / 1, 0.0 / 2, 1.0 / 3, 2.0 / 4)
avg_precision_ex2 = (0.0 / 1, 0.0 / 2, precision_ex2[2] / 3,
(precision_ex2[2] + precision_ex2[3]) / 4)
for i in xrange(4):
k = i + 1
self._test_sparse_precision_at_k(
predictions, labels, k, expected=precision_ex2[i])
self._test_sparse_average_precision_at_k(
predictions, labels, k, expected=avg_precision_ex2[i])
# Both examples, we expect both precision and average precision to be the
# average of the 2 examples.
labels = np.array([labels_ex1, labels_ex2], dtype=np.int64)
predictions = (predictions_ex1, predictions_ex2)
streaming_precision = [(ex1 + ex2) / 2
for ex1, ex2 in zip(precision_ex1, precision_ex2)]
streaming_average_precision = [
(ex1 + ex2) / 2
for ex1, ex2 in zip(avg_precision_ex1, avg_precision_ex2)
]
for i in xrange(4):
k = i + 1
self._test_sparse_precision_at_k(
predictions, labels, k, expected=streaming_precision[i])
self._test_sparse_average_precision_at_k(
predictions, labels, k, expected=streaming_average_precision[i])
# Weighted examples, we expect streaming average precision to be the
# weighted average of the 2 examples.
weights = (0.3, 0.6)
streaming_average_precision = [
(weights[0] * ex1 + weights[1] * ex2) / (weights[0] + weights[1])
for ex1, ex2 in zip(avg_precision_ex1, avg_precision_ex2)
]
for i in xrange(4):
k = i + 1
self._test_sparse_average_precision_at_k(
predictions,
labels,
k,
expected=streaming_average_precision[i],
weights=weights)
def test_average_precision_some_labels_out_of_range(self):
"""Tests that labels outside the [0, n_classes) range are ignored."""
labels_ex1 = (-1, 0, 1, 2, 3, 4, 7)
labels = np.array([labels_ex1], dtype=np.int64)
predictions_ex1 = (0.2, 0.1, 0.0, 0.4, 0.0, 0.5, 0.3)
predictions = (predictions_ex1,)
precision_ex1 = (0.0 / 1, 1.0 / 2, 1.0 / 3, 2.0 / 4)
avg_precision_ex1 = (0.0 / 1, precision_ex1[1] / 2, precision_ex1[1] / 3,
(precision_ex1[1] + precision_ex1[3]) / 4)
for i in xrange(4):
k = i + 1
self._test_sparse_precision_at_k(
predictions, labels, k, expected=precision_ex1[i])
self._test_sparse_average_precision_at_k(
predictions, labels, k, expected=avg_precision_ex1[i])
def test_three_labels_at_k5_no_predictions(self):
predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]]
sparse_labels = _binary_2d_label_to_2d_sparse_value(
[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]])
dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Classes 1,3,8 have 0 predictions, classes -1 and 10 are out of range.
for class_id in (-1, 1, 3, 8, 10):
self._test_sparse_precision_at_k(
predictions, labels, k=5, expected=NAN, class_id=class_id)
def test_three_labels_at_k5_no_labels(self):
predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]]
sparse_labels = _binary_2d_label_to_2d_sparse_value(
[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]])
dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Classes 0,4,6,9: 0 labels, >=1 prediction.
for class_id in (0, 4, 6, 9):
self._test_sparse_precision_at_k(
predictions, labels, k=5, expected=0.0, class_id=class_id)
def test_three_labels_at_k5(self):
predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]]
sparse_labels = _binary_2d_label_to_2d_sparse_value(
[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]])
dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Class 2: 2 labels, 2 correct predictions.
self._test_sparse_precision_at_k(
predictions, labels, k=5, expected=2.0 / 2, class_id=2)
# Class 5: 1 label, 1 correct prediction.
self._test_sparse_precision_at_k(
predictions, labels, k=5, expected=1.0 / 1, class_id=5)
# Class 7: 1 label, 1 incorrect prediction.
self._test_sparse_precision_at_k(
predictions, labels, k=5, expected=0.0 / 1, class_id=7)
# All classes: 10 predictions, 3 correct.
self._test_sparse_precision_at_k(
predictions, labels, k=5, expected=3.0 / 10)
def test_three_labels_at_k5_some_out_of_range(self):
"""Tests that labels outside the [0, n_classes) range are ignored."""
predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]]
sp_labels = sparse_tensor.SparseTensorValue(
indices=[[0, 0], [0, 1], [0, 2], [0, 3], [1, 0], [1, 1], [1, 2],
[1, 3]],
# values -1 and 10 are outside the [0, n_classes) range and are ignored.
values=np.array([2, 7, -1, 8, 1, 2, 5, 10], np.int64),
dense_shape=[2, 4])
# Class 2: 2 labels, 2 correct predictions.
self._test_sparse_precision_at_k(
predictions, sp_labels, k=5, expected=2.0 / 2, class_id=2)
# Class 5: 1 label, 1 correct prediction.
self._test_sparse_precision_at_k(
predictions, sp_labels, k=5, expected=1.0 / 1, class_id=5)
# Class 7: 1 label, 1 incorrect prediction.
self._test_sparse_precision_at_k(
predictions, sp_labels, k=5, expected=0.0 / 1, class_id=7)
# All classes: 10 predictions, 3 correct.
self._test_sparse_precision_at_k(
predictions, sp_labels, k=5, expected=3.0 / 10)
def test_3d_nan(self):
predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]],
[[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]]
labels = _binary_3d_label_to_sparse_value(
[[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]],
[[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]])
# Classes 1,3,8 have 0 predictions, classes -1 and 10 are out of range.
for class_id in (-1, 1, 3, 8, 10):
self._test_sparse_precision_at_k(
predictions, labels, k=5, expected=NAN, class_id=class_id)
def test_3d_no_labels(self):
predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]],
[[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]]
labels = _binary_3d_label_to_sparse_value(
[[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]],
[[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]])
# Classes 0,4,6,9: 0 labels, >=1 prediction.
for class_id in (0, 4, 6, 9):
self._test_sparse_precision_at_k(
predictions, labels, k=5, expected=0.0, class_id=class_id)
def test_3d(self):
predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]],
[[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]]
labels = _binary_3d_label_to_sparse_value(
[[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]],
[[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]])
# Class 2: 4 predictions, all correct.
self._test_sparse_precision_at_k(
predictions, labels, k=5, expected=4.0 / 4, class_id=2)
# Class 5: 2 predictions, both correct.
self._test_sparse_precision_at_k(
predictions, labels, k=5, expected=2.0 / 2, class_id=5)
# Class 7: 2 predictions, 1 correct.
self._test_sparse_precision_at_k(
predictions, labels, k=5, expected=1.0 / 2, class_id=7)
# All classes: 20 predictions, 7 correct.
self._test_sparse_precision_at_k(
predictions, labels, k=5, expected=7.0 / 20)
def test_3d_ignore_some(self):
predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]],
[[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]]
labels = _binary_3d_label_to_sparse_value(
[[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]],
[[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]])
# Class 2: 2 predictions, both correct.
self._test_sparse_precision_at_k(
predictions,
labels,
k=5,
expected=2.0 / 2.0,
class_id=2,
weights=[[1], [0]])
# Class 2: 2 predictions, both correct.
self._test_sparse_precision_at_k(
predictions,
labels,
k=5,
expected=2.0 / 2.0,
class_id=2,
weights=[[0], [1]])
# Class 7: 1 incorrect prediction.
self._test_sparse_precision_at_k(
predictions,
labels,
k=5,
expected=0.0 / 1.0,
class_id=7,
weights=[[1], [0]])
# Class 7: 1 correct prediction.
self._test_sparse_precision_at_k(
predictions,
labels,
k=5,
expected=1.0 / 1.0,
class_id=7,
weights=[[0], [1]])
# Class 7: no predictions.
self._test_sparse_precision_at_k(
predictions,
labels,
k=5,
expected=NAN,
class_id=7,
weights=[[1, 0], [0, 1]])
# Class 7: 2 predictions, 1 correct.
self._test_sparse_precision_at_k(
predictions,
labels,
k=5,
expected=1.0 / 2.0,
class_id=7,
weights=[[0, 1], [1, 0]])
def _test_recall_at_k(predictions,
labels,
k,
expected,
class_id=None,
weights=None,
test_case=None):
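  # Mirrors _test_sparse_precision_at_k above, exercising metrics.recall_at_k
  # with the same initialize / update / assert pattern.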
with ops.Graph().as_default() as g, test_case.test_session(g):
if weights is not None:
weights = constant_op.constant(weights, dtypes_lib.float32)
metric, update = metrics.recall_at_k(
predictions=constant_op.constant(predictions, dtypes_lib.float32),
labels=labels,
k=k,
class_id=class_id,
weights=weights)
# Fails without initialized vars.
test_case.assertRaises(errors_impl.OpError, metric.eval)
test_case.assertRaises(errors_impl.OpError, update.eval)
variables.variables_initializer(variables.local_variables()).run()
# Run per-step op and assert expected values.
if math.isnan(expected):
_assert_nan(test_case, update.eval())
_assert_nan(test_case, metric.eval())
else:
test_case.assertEqual(expected, update.eval())
test_case.assertEqual(expected, metric.eval())
class SingleLabelRecallAtKTest(test.TestCase):
def setUp(self):
self._predictions = ((0.1, 0.3, 0.2, 0.4), (0.1, 0.2, 0.3, 0.4))
indicator_labels = ((0, 0, 0, 1), (0, 0, 1, 0))
class_labels = (3, 2)
# Sparse vs dense, and 1d vs 2d labels should all be handled the same.
self._labels = (
_binary_2d_label_to_1d_sparse_value(indicator_labels),
_binary_2d_label_to_2d_sparse_value(indicator_labels), np.array(
class_labels, dtype=np.int64), np.array(
[[class_id] for class_id in class_labels], dtype=np.int64))
self._test_recall_at_k = functools.partial(
_test_recall_at_k, test_case=self)
def test_at_k1_nan(self):
# Classes 0,1 have 0 labels, 0 predictions, classes -1 and 4 are out of
# range.
for labels in self._labels:
for class_id in (-1, 0, 1, 4):
self._test_recall_at_k(
self._predictions, labels, k=1, expected=NAN, class_id=class_id)
def test_at_k1_no_predictions(self):
for labels in self._labels:
# Class 2: 0 predictions.
self._test_recall_at_k(
self._predictions, labels, k=1, expected=0.0, class_id=2)
def test_one_label_at_k1(self):
for labels in self._labels:
# Class 3: 1 label, 2 predictions, 1 correct.
self._test_recall_at_k(
self._predictions, labels, k=1, expected=1.0 / 1, class_id=3)
# All classes: 2 labels, 2 predictions, 1 correct.
self._test_recall_at_k(self._predictions, labels, k=1, expected=1.0 / 2)
def test_one_label_at_k1_weighted(self):
predictions = self._predictions
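    # Per-example weights scale both the true-positive and false-negative
    # counts, so any nonzero weight on the example holding the class-3 label
    # yields recall 1 for that class, while a zero weight there makes it NaN.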
for labels in self._labels:
# Class 3: 1 label, 2 predictions, 1 correct.
self._test_recall_at_k(
predictions, labels, k=1, expected=NAN, class_id=3, weights=(0.0,))
self._test_recall_at_k(
predictions,
labels,
k=1,
expected=1.0 / 1,
class_id=3,
weights=(1.0,))
self._test_recall_at_k(
predictions,
labels,
k=1,
expected=1.0 / 1,
class_id=3,
weights=(2.0,))
self._test_recall_at_k(
predictions,
labels,
k=1,
expected=NAN,
class_id=3,
weights=(0.0, 0.0))
self._test_recall_at_k(
predictions,
labels,
k=1,
expected=NAN,
class_id=3,
weights=(0.0, 1.0))
self._test_recall_at_k(
predictions,
labels,
k=1,
expected=1.0 / 1,
class_id=3,
weights=(1.0, 0.0))
self._test_recall_at_k(
predictions,
labels,
k=1,
expected=1.0 / 1,
class_id=3,
weights=(1.0, 1.0))
self._test_recall_at_k(
predictions,
labels,
k=1,
expected=2.0 / 2,
class_id=3,
weights=(2.0, 3.0))
self._test_recall_at_k(
predictions,
labels,
k=1,
expected=3.0 / 3,
class_id=3,
weights=(3.0, 2.0))
self._test_recall_at_k(
predictions,
labels,
k=1,
expected=0.3 / 0.3,
class_id=3,
weights=(0.3, 0.6))
self._test_recall_at_k(
predictions,
labels,
k=1,
expected=0.6 / 0.6,
class_id=3,
weights=(0.6, 0.3))
# All classes: 2 labels, 2 predictions, 1 correct.
self._test_recall_at_k(
predictions, labels, k=1, expected=NAN, weights=(0.0,))
self._test_recall_at_k(
predictions, labels, k=1, expected=1.0 / 2, weights=(1.0,))
self._test_recall_at_k(
predictions, labels, k=1, expected=1.0 / 2, weights=(2.0,))
self._test_recall_at_k(
predictions, labels, k=1, expected=1.0 / 1, weights=(1.0, 0.0))
self._test_recall_at_k(
predictions, labels, k=1, expected=0.0 / 1, weights=(0.0, 1.0))
self._test_recall_at_k(
predictions, labels, k=1, expected=1.0 / 2, weights=(1.0, 1.0))
self._test_recall_at_k(
predictions, labels, k=1, expected=2.0 / 5, weights=(2.0, 3.0))
self._test_recall_at_k(
predictions, labels, k=1, expected=3.0 / 5, weights=(3.0, 2.0))
self._test_recall_at_k(
predictions, labels, k=1, expected=0.3 / 0.9, weights=(0.3, 0.6))
self._test_recall_at_k(
predictions, labels, k=1, expected=0.6 / 0.9, weights=(0.6, 0.3))
class MultiLabel2dRecallAtKTest(test.TestCase):
def setUp(self):
self._predictions = ((0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9),
(0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6))
indicator_labels = ((0, 0, 1, 0, 0, 0, 0, 1, 1, 0),
(0, 1, 1, 0, 0, 1, 0, 0, 0, 0))
class_labels = ((2, 7, 8), (1, 2, 5))
# Sparse vs dense labels should be handled the same.
self._labels = (_binary_2d_label_to_2d_sparse_value(indicator_labels),
np.array(
class_labels, dtype=np.int64))
self._test_recall_at_k = functools.partial(
_test_recall_at_k, test_case=self)
def test_at_k5_nan(self):
for labels in self._labels:
# Classes 0,3,4,6,9 have 0 labels, class 10 is out of range.
for class_id in (0, 3, 4, 6, 9, 10):
self._test_recall_at_k(
self._predictions, labels, k=5, expected=NAN, class_id=class_id)
def test_at_k5_no_predictions(self):
for labels in self._labels:
# Class 8: 1 label, no predictions.
self._test_recall_at_k(
self._predictions, labels, k=5, expected=0.0 / 1, class_id=8)
def test_at_k5(self):
for labels in self._labels:
# Class 2: 2 labels, both correct.
self._test_recall_at_k(
self._predictions, labels, k=5, expected=2.0 / 2, class_id=2)
      # Class 5: 1 label, 1 correct prediction.
self._test_recall_at_k(
self._predictions, labels, k=5, expected=1.0 / 1, class_id=5)
# Class 7: 1 label, incorrect.
self._test_recall_at_k(
self._predictions, labels, k=5, expected=0.0 / 1, class_id=7)
# All classes: 6 labels, 3 correct.
self._test_recall_at_k(self._predictions, labels, k=5, expected=3.0 / 6)
def test_at_k5_some_out_of_range(self):
"""Tests that labels outside the [0, n_classes) count in denominator."""
labels = sparse_tensor.SparseTensorValue(
indices=[[0, 0], [0, 1], [0, 2], [0, 3], [1, 0], [1, 1], [1, 2],
[1, 3]],
# values -1 and 10 are outside the [0, n_classes) range.
values=np.array([2, 7, -1, 8, 1, 2, 5, 10], np.int64),
dense_shape=[2, 4])
# Class 2: 2 labels, both correct.
self._test_recall_at_k(
self._predictions, labels, k=5, expected=2.0 / 2, class_id=2)
    # Class 5: 1 label, 1 correct prediction.
self._test_recall_at_k(
self._predictions, labels, k=5, expected=1.0 / 1, class_id=5)
# Class 7: 1 label, incorrect.
self._test_recall_at_k(
self._predictions, labels, k=5, expected=0.0 / 1, class_id=7)
# All classes: 8 labels, 3 correct.
self._test_recall_at_k(self._predictions, labels, k=5, expected=3.0 / 8)
class MultiLabel3dRecallAtKTest(test.TestCase):
def setUp(self):
self._predictions = (((0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9),
(0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6)),
((0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6),
(0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9)))
# Note: We don't test dense labels here, since examples have different
# numbers of labels.
self._labels = _binary_3d_label_to_sparse_value(((
(0, 0, 1, 0, 0, 0, 0, 1, 1, 0), (0, 1, 1, 0, 0, 1, 0, 0, 0, 0)), (
(0, 1, 1, 0, 0, 1, 0, 1, 0, 0), (0, 0, 1, 0, 0, 0, 0, 0, 1, 0))))
self._test_recall_at_k = functools.partial(
_test_recall_at_k, test_case=self)
def test_3d_nan(self):
# Classes 0,3,4,6,9 have 0 labels, class 10 is out of range.
for class_id in (0, 3, 4, 6, 9, 10):
self._test_recall_at_k(
self._predictions, self._labels, k=5, expected=NAN, class_id=class_id)
def test_3d_no_predictions(self):
# Classes 1,8 have 0 predictions, >=1 label.
for class_id in (1, 8):
self._test_recall_at_k(
self._predictions, self._labels, k=5, expected=0.0, class_id=class_id)
def test_3d(self):
# Class 2: 4 labels, all correct.
self._test_recall_at_k(
self._predictions, self._labels, k=5, expected=4.0 / 4, class_id=2)
# Class 5: 2 labels, both correct.
self._test_recall_at_k(
self._predictions, self._labels, k=5, expected=2.0 / 2, class_id=5)
# Class 7: 2 labels, 1 incorrect.
self._test_recall_at_k(
self._predictions, self._labels, k=5, expected=1.0 / 2, class_id=7)
# All classes: 12 labels, 7 correct.
self._test_recall_at_k(
self._predictions, self._labels, k=5, expected=7.0 / 12)
def test_3d_ignore_all(self):
for class_id in xrange(10):
self._test_recall_at_k(
self._predictions,
self._labels,
k=5,
expected=NAN,
class_id=class_id,
weights=[[0], [0]])
self._test_recall_at_k(
self._predictions,
self._labels,
k=5,
expected=NAN,
class_id=class_id,
weights=[[0, 0], [0, 0]])
self._test_recall_at_k(
self._predictions, self._labels, k=5, expected=NAN, weights=[[0], [0]])
self._test_recall_at_k(
self._predictions,
self._labels,
k=5,
expected=NAN,
weights=[[0, 0], [0, 0]])
def test_3d_ignore_some(self):
# Class 2: 2 labels, both correct.
self._test_recall_at_k(
self._predictions,
self._labels,
k=5,
expected=2.0 / 2.0,
class_id=2,
weights=[[1], [0]])
# Class 2: 2 labels, both correct.
self._test_recall_at_k(
self._predictions,
self._labels,
k=5,
expected=2.0 / 2.0,
class_id=2,
weights=[[0], [1]])
# Class 7: 1 label, correct.
self._test_recall_at_k(
self._predictions,
self._labels,
k=5,
expected=1.0 / 1.0,
class_id=7,
weights=[[0], [1]])
# Class 7: 1 label, incorrect.
self._test_recall_at_k(
self._predictions,
self._labels,
k=5,
expected=0.0 / 1.0,
class_id=7,
weights=[[1], [0]])
# Class 7: 2 labels, 1 correct.
self._test_recall_at_k(
self._predictions,
self._labels,
k=5,
expected=1.0 / 2.0,
class_id=7,
weights=[[1, 0], [1, 0]])
# Class 7: No labels.
self._test_recall_at_k(
self._predictions,
self._labels,
k=5,
expected=NAN,
class_id=7,
weights=[[0, 1], [0, 1]])
class MeanAbsoluteErrorTest(test.TestCase):
def setUp(self):
ops.reset_default_graph()
def testVars(self):
metrics.mean_absolute_error(
predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)))
_assert_local_variables(self, ('mean_absolute_error/count:0',
'mean_absolute_error/total:0'))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.mean_absolute_error(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.mean_absolute_error(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = random_ops.random_normal((10, 3), seed=1)
labels = random_ops.random_normal((10, 3), seed=2)
error, update_op = metrics.mean_absolute_error(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_error = error.eval()
for _ in range(10):
self.assertEqual(initial_error, error.eval())
def testSingleUpdateWithErrorAndWeights(self):
predictions = constant_op.constant(
[2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)
labels = constant_op.constant(
[1, 3, 2, 3], shape=(1, 4), dtype=dtypes_lib.float32)
weights = constant_op.constant([0, 1, 0, 1], shape=(1, 4))
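    # Only the second and fourth elements carry weight: absolute errors
    # |4 - 3| = 1 and |8 - 3| = 5, so the weighted mean absolute error is
    # (1 + 5) / 2 = 3.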
error, update_op = metrics.mean_absolute_error(labels, predictions, weights)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertEqual(3, sess.run(update_op))
self.assertEqual(3, error.eval())
class MeanRelativeErrorTest(test.TestCase):
def setUp(self):
ops.reset_default_graph()
def testVars(self):
metrics.mean_relative_error(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
normalizer=array_ops.ones((10, 1)))
_assert_local_variables(self, ('mean_relative_error/count:0',
'mean_relative_error/total:0'))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.mean_relative_error(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
normalizer=array_ops.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.mean_relative_error(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
normalizer=array_ops.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = random_ops.random_normal((10, 3), seed=1)
labels = random_ops.random_normal((10, 3), seed=2)
normalizer = random_ops.random_normal((10, 3), seed=3)
error, update_op = metrics.mean_relative_error(labels, predictions,
normalizer)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_error = error.eval()
for _ in range(10):
self.assertEqual(initial_error, error.eval())
def testSingleUpdateNormalizedByLabels(self):
np_predictions = np.asarray([2, 4, 6, 8], dtype=np.float32)
np_labels = np.asarray([1, 3, 2, 3], dtype=np.float32)
expected_error = np.mean(
np.divide(np.absolute(np_predictions - np_labels), np_labels))
predictions = constant_op.constant(
np_predictions, shape=(1, 4), dtype=dtypes_lib.float32)
labels = constant_op.constant(np_labels, shape=(1, 4))
error, update_op = metrics.mean_relative_error(
labels, predictions, normalizer=labels)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertEqual(expected_error, sess.run(update_op))
self.assertEqual(expected_error, error.eval())
def testSingleUpdateNormalizedByZeros(self):
np_predictions = np.asarray([2, 4, 6, 8], dtype=np.float32)
predictions = constant_op.constant(
np_predictions, shape=(1, 4), dtype=dtypes_lib.float32)
labels = constant_op.constant(
[1, 3, 2, 3], shape=(1, 4), dtype=dtypes_lib.float32)
error, update_op = metrics.mean_relative_error(
labels, predictions, normalizer=array_ops.zeros_like(labels))
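    # With an all-zero normalizer the per-element relative errors are expected
    # to be reported as 0 instead of dividing by zero, so the metric is 0.0.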
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertEqual(0.0, sess.run(update_op))
self.assertEqual(0.0, error.eval())
class MeanSquaredErrorTest(test.TestCase):
def setUp(self):
ops.reset_default_graph()
def testVars(self):
metrics.mean_squared_error(
predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)))
_assert_local_variables(self, ('mean_squared_error/count:0',
'mean_squared_error/total:0'))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.mean_squared_error(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.mean_squared_error(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = random_ops.random_normal((10, 3), seed=1)
labels = random_ops.random_normal((10, 3), seed=2)
error, update_op = metrics.mean_squared_error(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_error = error.eval()
for _ in range(10):
self.assertEqual(initial_error, error.eval())
def testSingleUpdateZeroError(self):
predictions = array_ops.zeros((1, 3), dtype=dtypes_lib.float32)
labels = array_ops.zeros((1, 3), dtype=dtypes_lib.float32)
error, update_op = metrics.mean_squared_error(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertEqual(0, sess.run(update_op))
self.assertEqual(0, error.eval())
def testSingleUpdateWithError(self):
predictions = constant_op.constant(
[2, 4, 6], shape=(1, 3), dtype=dtypes_lib.float32)
labels = constant_op.constant(
[1, 3, 2], shape=(1, 3), dtype=dtypes_lib.float32)
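    # Squared errors are 1, 1 and 16, so the mean squared error is 18 / 3 = 6.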
error, update_op = metrics.mean_squared_error(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertEqual(6, sess.run(update_op))
self.assertEqual(6, error.eval())
def testSingleUpdateWithErrorAndWeights(self):
predictions = constant_op.constant(
[2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)
labels = constant_op.constant(
[1, 3, 2, 3], shape=(1, 4), dtype=dtypes_lib.float32)
weights = constant_op.constant([0, 1, 0, 1], shape=(1, 4))
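    # Only the second and fourth elements carry weight: squared errors 1 and
    # 25, giving a weighted mean of 26 / 2 = 13.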
error, update_op = metrics.mean_squared_error(labels, predictions, weights)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertEqual(13, sess.run(update_op))
self.assertEqual(13, error.eval())
def testMultipleBatchesOfSizeOne(self):
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = data_flow_ops.FIFOQueue(
2, dtypes=dtypes_lib.float32, shapes=(1, 3))
_enqueue_vector(sess, preds_queue, [10, 8, 6])
_enqueue_vector(sess, preds_queue, [-4, 3, -1])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = data_flow_ops.FIFOQueue(
2, dtypes=dtypes_lib.float32, shapes=(1, 3))
_enqueue_vector(sess, labels_queue, [1, 3, 2])
_enqueue_vector(sess, labels_queue, [2, 4, 6])
labels = labels_queue.dequeue()
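      # Batch 1 contributes squared errors 81 + 25 + 16 = 122 and batch 2
      # contributes 36 + 1 + 49 = 86, so after both updates the mean over the
      # 6 values is 208 / 6.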
error, update_op = metrics.mean_squared_error(labels, predictions)
sess.run(variables.local_variables_initializer())
sess.run(update_op)
self.assertAlmostEqual(208.0 / 6, sess.run(update_op), 5)
self.assertAlmostEqual(208.0 / 6, error.eval(), 5)
def testMetricsComputedConcurrently(self):
with self.test_session() as sess:
# Create the queue that populates one set of predictions.
preds_queue0 = data_flow_ops.FIFOQueue(
2, dtypes=dtypes_lib.float32, shapes=(1, 3))
_enqueue_vector(sess, preds_queue0, [10, 8, 6])
_enqueue_vector(sess, preds_queue0, [-4, 3, -1])
predictions0 = preds_queue0.dequeue()
# Create the queue that populates one set of predictions.
preds_queue1 = data_flow_ops.FIFOQueue(
2, dtypes=dtypes_lib.float32, shapes=(1, 3))
_enqueue_vector(sess, preds_queue1, [0, 1, 1])
_enqueue_vector(sess, preds_queue1, [1, 1, 0])
predictions1 = preds_queue1.dequeue()
# Create the queue that populates one set of labels.
labels_queue0 = data_flow_ops.FIFOQueue(
2, dtypes=dtypes_lib.float32, shapes=(1, 3))
_enqueue_vector(sess, labels_queue0, [1, 3, 2])
_enqueue_vector(sess, labels_queue0, [2, 4, 6])
labels0 = labels_queue0.dequeue()
# Create the queue that populates another set of labels.
labels_queue1 = data_flow_ops.FIFOQueue(
2, dtypes=dtypes_lib.float32, shapes=(1, 3))
_enqueue_vector(sess, labels_queue1, [-5, -3, -1])
_enqueue_vector(sess, labels_queue1, [5, 4, 3])
labels1 = labels_queue1.dequeue()
mse0, update_op0 = metrics.mean_squared_error(
labels0, predictions0, name='msd0')
mse1, update_op1 = metrics.mean_squared_error(
labels1, predictions1, name='msd1')
sess.run(variables.local_variables_initializer())
sess.run([update_op0, update_op1])
sess.run([update_op0, update_op1])
mse0, mse1 = sess.run([mse0, mse1])
self.assertAlmostEqual(208.0 / 6, mse0, 5)
self.assertAlmostEqual(79.0 / 6, mse1, 5)
def testMultipleMetricsOnMultipleBatchesOfSizeOne(self):
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = data_flow_ops.FIFOQueue(
2, dtypes=dtypes_lib.float32, shapes=(1, 3))
_enqueue_vector(sess, preds_queue, [10, 8, 6])
_enqueue_vector(sess, preds_queue, [-4, 3, -1])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = data_flow_ops.FIFOQueue(
2, dtypes=dtypes_lib.float32, shapes=(1, 3))
_enqueue_vector(sess, labels_queue, [1, 3, 2])
_enqueue_vector(sess, labels_queue, [2, 4, 6])
labels = labels_queue.dequeue()
mae, ma_update_op = metrics.mean_absolute_error(labels, predictions)
mse, ms_update_op = metrics.mean_squared_error(labels, predictions)
sess.run(variables.local_variables_initializer())
sess.run([ma_update_op, ms_update_op])
sess.run([ma_update_op, ms_update_op])
self.assertAlmostEqual(32.0 / 6, mae.eval(), 5)
self.assertAlmostEqual(208.0 / 6, mse.eval(), 5)
class RootMeanSquaredErrorTest(test.TestCase):
def setUp(self):
ops.reset_default_graph()
def testVars(self):
metrics.root_mean_squared_error(
predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)))
_assert_local_variables(self, ('root_mean_squared_error/count:0',
'root_mean_squared_error/total:0'))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.root_mean_squared_error(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.root_mean_squared_error(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = random_ops.random_normal((10, 3), seed=1)
labels = random_ops.random_normal((10, 3), seed=2)
error, update_op = metrics.root_mean_squared_error(labels, predictions)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_error = error.eval()
for _ in range(10):
self.assertEqual(initial_error, error.eval())
def testSingleUpdateZeroError(self):
with self.test_session() as sess:
predictions = constant_op.constant(
0.0, shape=(1, 3), dtype=dtypes_lib.float32)
labels = constant_op.constant(0.0, shape=(1, 3), dtype=dtypes_lib.float32)
rmse, update_op = metrics.root_mean_squared_error(labels, predictions)
sess.run(variables.local_variables_initializer())
self.assertEqual(0, sess.run(update_op))
self.assertEqual(0, rmse.eval())
def testSingleUpdateWithError(self):
with self.test_session() as sess:
predictions = constant_op.constant(
[2, 4, 6], shape=(1, 3), dtype=dtypes_lib.float32)
labels = constant_op.constant(
[1, 3, 2], shape=(1, 3), dtype=dtypes_lib.float32)
rmse, update_op = metrics.root_mean_squared_error(labels, predictions)
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(math.sqrt(6), update_op.eval(), 5)
self.assertAlmostEqual(math.sqrt(6), rmse.eval(), 5)
def testSingleUpdateWithErrorAndWeights(self):
with self.test_session() as sess:
predictions = constant_op.constant(
[2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)
labels = constant_op.constant(
[1, 3, 2, 3], shape=(1, 4), dtype=dtypes_lib.float32)
weights = constant_op.constant([0, 1, 0, 1], shape=(1, 4))
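      # The weighted squared errors are 1 and 25, so the weighted mean squared
      # error is 13 and the reported root is sqrt(13).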
rmse, update_op = metrics.root_mean_squared_error(labels, predictions,
weights)
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(math.sqrt(13), sess.run(update_op))
self.assertAlmostEqual(math.sqrt(13), rmse.eval(), 5)
def _reweight(predictions, labels, weights):
return (np.concatenate([[p] * int(w) for p, w in zip(predictions, weights)]),
np.concatenate([[l] * int(w) for l, w in zip(labels, weights)]))
class MeanCosineDistanceTest(test.TestCase):
def setUp(self):
ops.reset_default_graph()
def testVars(self):
metrics.mean_cosine_distance(
predictions=array_ops.ones((10, 3)),
labels=array_ops.ones((10, 3)),
dim=1)
_assert_local_variables(self, (
'mean_cosine_distance/count:0',
'mean_cosine_distance/total:0',))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.mean_cosine_distance(
predictions=array_ops.ones((10, 3)),
labels=array_ops.ones((10, 3)),
dim=1,
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.mean_cosine_distance(
predictions=array_ops.ones((10, 3)),
labels=array_ops.ones((10, 3)),
dim=1,
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = random_ops.random_normal((10, 3), seed=1)
labels = random_ops.random_normal((10, 3), seed=2)
error, update_op = metrics.mean_cosine_distance(labels, predictions, dim=1)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_error = error.eval()
for _ in range(10):
self.assertEqual(initial_error, error.eval())
def testSingleUpdateZeroError(self):
np_labels = np.matrix(('1 0 0;' '0 0 1;' '0 1 0'))
predictions = constant_op.constant(
np_labels, shape=(1, 3, 3), dtype=dtypes_lib.float32)
labels = constant_op.constant(
np_labels, shape=(1, 3, 3), dtype=dtypes_lib.float32)
error, update_op = metrics.mean_cosine_distance(labels, predictions, dim=2)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertEqual(0, sess.run(update_op))
self.assertEqual(0, error.eval())
def testSingleUpdateWithError1(self):
np_labels = np.matrix(('1 0 0;' '0 0 1;' '0 1 0'))
np_predictions = np.matrix(('1 0 0;' '0 0 -1;' '1 0 0'))
predictions = constant_op.constant(
np_predictions, shape=(3, 1, 3), dtype=dtypes_lib.float32)
labels = constant_op.constant(
np_labels, shape=(3, 1, 3), dtype=dtypes_lib.float32)
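    # Row-wise cosine distances: identical vectors give 0, opposite vectors
    # give 2 and orthogonal vectors give 1, so the mean is (0 + 2 + 1) / 3 = 1.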
error, update_op = metrics.mean_cosine_distance(labels, predictions, dim=2)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(1, sess.run(update_op), 5)
self.assertAlmostEqual(1, error.eval(), 5)
def testSingleUpdateWithError2(self):
np_predictions = np.matrix(
('0.819031913261206 0.567041924552012 0.087465312324590;'
'-0.665139432070255 -0.739487441769973 -0.103671883216994;'
'0.707106781186548 -0.707106781186548 0'))
np_labels = np.matrix(
('0.819031913261206 0.567041924552012 0.087465312324590;'
'0.665139432070255 0.739487441769973 0.103671883216994;'
'0.707106781186548 0.707106781186548 0'))
predictions = constant_op.constant(
np_predictions, shape=(3, 1, 3), dtype=dtypes_lib.float32)
labels = constant_op.constant(
np_labels, shape=(3, 1, 3), dtype=dtypes_lib.float32)
error, update_op = metrics.mean_cosine_distance(labels, predictions, dim=2)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertAlmostEqual(1.0, sess.run(update_op), 5)
self.assertAlmostEqual(1.0, error.eval(), 5)
def testSingleUpdateWithErrorAndWeights1(self):
np_predictions = np.matrix(('1 0 0;' '0 0 -1;' '1 0 0'))
np_labels = np.matrix(('1 0 0;' '0 0 1;' '0 1 0'))
predictions = constant_op.constant(
np_predictions, shape=(3, 1, 3), dtype=dtypes_lib.float32)
labels = constant_op.constant(
np_labels, shape=(3, 1, 3), dtype=dtypes_lib.float32)
weights = constant_op.constant(
[1, 0, 0], shape=(3, 1, 1), dtype=dtypes_lib.float32)
error, update_op = metrics.mean_cosine_distance(
labels, predictions, dim=2, weights=weights)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertEqual(0, sess.run(update_op))
self.assertEqual(0, error.eval())
def testSingleUpdateWithErrorAndWeights2(self):
np_predictions = np.matrix(('1 0 0;' '0 0 -1;' '1 0 0'))
np_labels = np.matrix(('1 0 0;' '0 0 1;' '0 1 0'))
predictions = constant_op.constant(
np_predictions, shape=(3, 1, 3), dtype=dtypes_lib.float32)
labels = constant_op.constant(
np_labels, shape=(3, 1, 3), dtype=dtypes_lib.float32)
weights = constant_op.constant(
[0, 1, 1], shape=(3, 1, 1), dtype=dtypes_lib.float32)
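    # With the first row masked out, only the opposite (distance 2) and
    # orthogonal (distance 1) pairs remain, so the weighted mean is 3 / 2 = 1.5.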
error, update_op = metrics.mean_cosine_distance(
labels, predictions, dim=2, weights=weights)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
self.assertEqual(1.5, update_op.eval())
self.assertEqual(1.5, error.eval())
class PcntBelowThreshTest(test.TestCase):
def setUp(self):
ops.reset_default_graph()
def testVars(self):
metrics.percentage_below(values=array_ops.ones((10,)), threshold=2)
_assert_local_variables(self, (
'percentage_below_threshold/count:0',
'percentage_below_threshold/total:0',))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.percentage_below(
values=array_ops.ones((10,)),
threshold=2,
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.percentage_below(
values=array_ops.ones((10,)),
threshold=2,
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testOneUpdate(self):
with self.test_session() as sess:
values = constant_op.constant(
[2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)
pcnt0, update_op0 = metrics.percentage_below(values, 100, name='high')
pcnt1, update_op1 = metrics.percentage_below(values, 7, name='medium')
pcnt2, update_op2 = metrics.percentage_below(values, 1, name='low')
sess.run(variables.local_variables_initializer())
sess.run([update_op0, update_op1, update_op2])
pcnt0, pcnt1, pcnt2 = sess.run([pcnt0, pcnt1, pcnt2])
self.assertAlmostEqual(1.0, pcnt0, 5)
self.assertAlmostEqual(0.75, pcnt1, 5)
self.assertAlmostEqual(0.0, pcnt2, 5)
def testSomePresentOneUpdate(self):
with self.test_session() as sess:
values = constant_op.constant(
[2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)
weights = constant_op.constant(
[1, 0, 0, 1], shape=(1, 4), dtype=dtypes_lib.float32)
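      # Only the values 2 and 8 carry weight: both fall below 100, only 2 falls
      # below 7, and neither falls below 1, giving 1.0, 0.5 and 0.0.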
pcnt0, update_op0 = metrics.percentage_below(
values, 100, weights=weights, name='high')
pcnt1, update_op1 = metrics.percentage_below(
values, 7, weights=weights, name='medium')
pcnt2, update_op2 = metrics.percentage_below(
values, 1, weights=weights, name='low')
sess.run(variables.local_variables_initializer())
self.assertListEqual([1.0, 0.5, 0.0],
sess.run([update_op0, update_op1, update_op2]))
pcnt0, pcnt1, pcnt2 = sess.run([pcnt0, pcnt1, pcnt2])
self.assertAlmostEqual(1.0, pcnt0, 5)
self.assertAlmostEqual(0.5, pcnt1, 5)
self.assertAlmostEqual(0.0, pcnt2, 5)
class MeanIOUTest(test.TestCase):
def setUp(self):
np.random.seed(1)
ops.reset_default_graph()
def testVars(self):
metrics.mean_iou(
predictions=array_ops.ones([10, 1]),
labels=array_ops.ones([10, 1]),
num_classes=2)
_assert_local_variables(self, ('mean_iou/total_confusion_matrix:0',))
def testMetricsCollections(self):
my_collection_name = '__metrics__'
mean_iou, _ = metrics.mean_iou(
predictions=array_ops.ones([10, 1]),
labels=array_ops.ones([10, 1]),
num_classes=2,
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [mean_iou])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.mean_iou(
predictions=array_ops.ones([10, 1]),
labels=array_ops.ones([10, 1]),
num_classes=2,
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testPredictionsAndLabelsOfDifferentSizeRaisesValueError(self):
predictions = array_ops.ones([10, 3])
labels = array_ops.ones([10, 4])
with self.assertRaises(ValueError):
metrics.mean_iou(labels, predictions, num_classes=2)
def testLabelsAndWeightsOfDifferentSizeRaisesValueError(self):
predictions = array_ops.ones([10])
labels = array_ops.ones([10])
weights = array_ops.zeros([9])
with self.assertRaises(ValueError):
metrics.mean_iou(labels, predictions, num_classes=2, weights=weights)
def testValueTensorIsIdempotent(self):
num_classes = 3
predictions = random_ops.random_uniform(
[10], maxval=num_classes, dtype=dtypes_lib.int64, seed=1)
labels = random_ops.random_uniform(
[10], maxval=num_classes, dtype=dtypes_lib.int64, seed=1)
mean_iou, update_op = metrics.mean_iou(
labels, predictions, num_classes=num_classes)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_mean_iou = mean_iou.eval()
for _ in range(10):
self.assertEqual(initial_mean_iou, mean_iou.eval())
def testMultipleUpdates(self):
num_classes = 3
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = data_flow_ops.FIFOQueue(
5, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [2])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = data_flow_ops.FIFOQueue(
5, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [2])
_enqueue_vector(sess, labels_queue, [1])
labels = labels_queue.dequeue()
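      # Predictions [0, 1, 2, 1, 0] against labels [0, 1, 1, 2, 1] give
      # per-class IOUs of 1/2, 1/4 and 0, which is the desired output below.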
miou, update_op = metrics.mean_iou(labels, predictions, num_classes)
sess.run(variables.local_variables_initializer())
for _ in range(5):
sess.run(update_op)
desired_output = np.mean([1.0 / 2.0, 1.0 / 4.0, 0.])
self.assertEqual(desired_output, miou.eval())
def testMultipleUpdatesWithWeights(self):
num_classes = 2
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = data_flow_ops.FIFOQueue(
6, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = data_flow_ops.FIFOQueue(
6, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
labels = labels_queue.dequeue()
# Create the queue that populates the weights.
weights_queue = data_flow_ops.FIFOQueue(
6, dtypes=dtypes_lib.float32, shapes=(1, 1))
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [0.0])
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [0.0])
weights = weights_queue.dequeue()
mean_iou, update_op = metrics.mean_iou(
labels, predictions, num_classes, weights=weights)
variables.local_variables_initializer().run()
for _ in range(6):
sess.run(update_op)
desired_output = np.mean([2.0 / 3.0, 1.0 / 2.0])
self.assertAlmostEqual(desired_output, mean_iou.eval())
def testMultipleUpdatesWithMissingClass(self):
    # Test the case where there are no predictions or labels for one class,
    # so the confusion matrix has one all-zero row and one all-zero column.
num_classes = 3
with self.test_session() as sess:
# Create the queue that populates the predictions.
# There is no prediction for class 2.
preds_queue = data_flow_ops.FIFOQueue(
5, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
      # There is no label for class 2.
labels_queue = data_flow_ops.FIFOQueue(
5, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
labels = labels_queue.dequeue()
miou, update_op = metrics.mean_iou(labels, predictions, num_classes)
sess.run(variables.local_variables_initializer())
for _ in range(5):
sess.run(update_op)
desired_output = np.mean([1.0 / 3.0, 2.0 / 4.0, 0.])
self.assertAlmostEqual(desired_output, miou.eval())
def testUpdateOpEvalIsAccumulatedConfusionMatrix(self):
predictions = array_ops.concat(
[
constant_op.constant(
0, shape=[5]), constant_op.constant(
1, shape=[5])
],
0)
labels = array_ops.concat(
[
constant_op.constant(
0, shape=[3]), constant_op.constant(
1, shape=[7])
],
0)
num_classes = 2
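    # Labels are 3 zeros then 7 ones while predictions are 5 zeros then 5 ones,
    # so the confusion matrix is [[3, 0], [2, 5]] and the per-class IOUs are
    # 3/5 and 5/7.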
with self.test_session() as sess:
miou, update_op = metrics.mean_iou(labels, predictions, num_classes)
sess.run(variables.local_variables_initializer())
confusion_matrix = update_op.eval()
self.assertAllEqual([[3, 0], [2, 5]], confusion_matrix)
desired_miou = np.mean([3. / 5., 5. / 7.])
self.assertAlmostEqual(desired_miou, miou.eval())
def testAllCorrect(self):
predictions = array_ops.zeros([40])
labels = array_ops.zeros([40])
num_classes = 1
with self.test_session() as sess:
miou, update_op = metrics.mean_iou(labels, predictions, num_classes)
sess.run(variables.local_variables_initializer())
self.assertEqual(40, update_op.eval()[0])
self.assertEqual(1.0, miou.eval())
def testAllWrong(self):
predictions = array_ops.zeros([40])
labels = array_ops.ones([40])
num_classes = 2
with self.test_session() as sess:
miou, update_op = metrics.mean_iou(labels, predictions, num_classes)
sess.run(variables.local_variables_initializer())
self.assertAllEqual([[0, 0], [40, 0]], update_op.eval())
self.assertEqual(0., miou.eval())
def testResultsWithSomeMissing(self):
predictions = array_ops.concat(
[
constant_op.constant(
0, shape=[5]), constant_op.constant(
1, shape=[5])
],
0)
labels = array_ops.concat(
[
constant_op.constant(
0, shape=[3]), constant_op.constant(
1, shape=[7])
],
0)
num_classes = 2
weights = array_ops.concat(
[
constant_op.constant(
0, shape=[1]), constant_op.constant(
1, shape=[8]), constant_op.constant(
0, shape=[1])
],
0)
with self.test_session() as sess:
miou, update_op = metrics.mean_iou(
labels, predictions, num_classes, weights=weights)
sess.run(variables.local_variables_initializer())
self.assertAllEqual([[2, 0], [2, 4]], update_op.eval())
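      # The zero weights drop the first and last samples, giving the matrix
      # above; IoU is then 2/4 for class 0 and 4/6 for class 1.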
desired_miou = np.mean([2. / 4., 4. / 6.])
self.assertAlmostEqual(desired_miou, miou.eval())
class MeanPerClassAccuracyTest(test.TestCase):
def setUp(self):
np.random.seed(1)
ops.reset_default_graph()
def testVars(self):
metrics.mean_per_class_accuracy(
predictions=array_ops.ones([10, 1]),
labels=array_ops.ones([10, 1]),
num_classes=2)
_assert_local_variables(self, ('mean_accuracy/total_confusion_matrix:0',))
def testMetricsCollections(self):
my_collection_name = '__metrics__'
mean_accuracy, _ = metrics.mean_per_class_accuracy(
predictions=array_ops.ones([10, 1]),
labels=array_ops.ones([10, 1]),
num_classes=2,
metrics_collections=[my_collection_name])
self.assertListEqual(
ops.get_collection(my_collection_name), [mean_accuracy])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.mean_per_class_accuracy(
predictions=array_ops.ones([10, 1]),
labels=array_ops.ones([10, 1]),
num_classes=2,
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testPredictionsAndLabelsOfDifferentSizeRaisesValueError(self):
predictions = array_ops.ones([10, 3])
labels = array_ops.ones([10, 4])
with self.assertRaises(ValueError):
metrics.mean_per_class_accuracy(labels, predictions, num_classes=2)
def testLabelsAndWeightsOfDifferentSizeRaisesValueError(self):
predictions = array_ops.ones([10])
labels = array_ops.ones([10])
weights = array_ops.zeros([9])
with self.assertRaises(ValueError):
metrics.mean_per_class_accuracy(
labels, predictions, num_classes=2, weights=weights)
def testValueTensorIsIdempotent(self):
num_classes = 3
predictions = random_ops.random_uniform(
[10], maxval=num_classes, dtype=dtypes_lib.int64, seed=1)
labels = random_ops.random_uniform(
[10], maxval=num_classes, dtype=dtypes_lib.int64, seed=1)
mean_accuracy, update_op = metrics.mean_per_class_accuracy(
labels, predictions, num_classes=num_classes)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_mean_accuracy = mean_accuracy.eval()
for _ in range(10):
self.assertEqual(initial_mean_accuracy, mean_accuracy.eval())
  def testMultipleUpdates(self):
    num_classes = 3
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = data_flow_ops.FIFOQueue(
5, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [2])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = data_flow_ops.FIFOQueue(
5, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [2])
_enqueue_vector(sess, labels_queue, [1])
labels = labels_queue.dequeue()
mean_accuracy, update_op = metrics.mean_per_class_accuracy(
labels, predictions, num_classes)
sess.run(variables.local_variables_initializer())
for _ in range(5):
sess.run(update_op)
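      # Per-class accuracy: class 0 is 1/1, class 1 is 1/3, class 2 is 0/1.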
desired_output = np.mean([1.0, 1.0 / 3.0, 0.0])
self.assertAlmostEqual(desired_output, mean_accuracy.eval())
def testMultipleUpdatesWithWeights(self):
num_classes = 2
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = data_flow_ops.FIFOQueue(
6, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = data_flow_ops.FIFOQueue(
6, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
labels = labels_queue.dequeue()
# Create the queue that populates the weights.
weights_queue = data_flow_ops.FIFOQueue(
6, dtypes=dtypes_lib.float32, shapes=(1, 1))
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [0.0])
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [0.0])
weights = weights_queue.dequeue()
mean_accuracy, update_op = metrics.mean_per_class_accuracy(
labels, predictions, num_classes, weights=weights)
variables.local_variables_initializer().run()
for _ in range(6):
sess.run(update_op)
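      # With the zero-weight samples dropped, class 0 is predicted correctly
      # 2 of 2 times and class 1 only 1 of 2 times.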
desired_output = np.mean([2.0 / 2.0, 1.0 / 2.0])
self.assertAlmostEqual(desired_output, mean_accuracy.eval())
def testMultipleUpdatesWithMissingClass(self):
    # Test the case where there are no predictions and labels for
# one class, and thus there is one row and one column with
# zero entries in the confusion matrix.
num_classes = 3
with self.test_session() as sess:
# Create the queue that populates the predictions.
# There is no prediction for class 2.
preds_queue = data_flow_ops.FIFOQueue(
5, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
      # There is no label for class 2.
labels_queue = data_flow_ops.FIFOQueue(
5, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
labels = labels_queue.dequeue()
mean_accuracy, update_op = metrics.mean_per_class_accuracy(
labels, predictions, num_classes)
sess.run(variables.local_variables_initializer())
for _ in range(5):
sess.run(update_op)
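      # Class 0 accuracy is 1/2, class 1 is 2/3, and the absent class 2
      # contributes 0 to the mean.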
desired_output = np.mean([1.0 / 2.0, 2.0 / 3.0, 0.])
self.assertAlmostEqual(desired_output, mean_accuracy.eval())
def testUpdateOpEvalIsAccumulatedConfusionMatrix(self):
predictions = array_ops.concat([
constant_op.constant(0, shape=[5]), constant_op.constant(1, shape=[5])
], 0)
labels = array_ops.concat([
constant_op.constant(0, shape=[3]), constant_op.constant(1, shape=[7])
], 0)
num_classes = 2
with self.test_session() as sess:
mean_accuracy, update_op = metrics.mean_per_class_accuracy(
labels, predictions, num_classes)
sess.run(variables.local_variables_initializer())
confusion_matrix = update_op.eval()
self.assertAllEqual([[3, 0], [2, 5]], confusion_matrix)
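      # Per-class accuracy is the diagonal over the row sum: 3/3 for class 0
      # and 5/7 for class 1.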
desired_mean_accuracy = np.mean([3. / 3., 5. / 7.])
self.assertAlmostEqual(desired_mean_accuracy, mean_accuracy.eval())
def testAllCorrect(self):
predictions = array_ops.zeros([40])
labels = array_ops.zeros([40])
num_classes = 1
with self.test_session() as sess:
mean_accuracy, update_op = metrics.mean_per_class_accuracy(
labels, predictions, num_classes)
sess.run(variables.local_variables_initializer())
self.assertEqual(40, update_op.eval()[0])
self.assertEqual(1.0, mean_accuracy.eval())
def testAllWrong(self):
predictions = array_ops.zeros([40])
labels = array_ops.ones([40])
num_classes = 2
with self.test_session() as sess:
mean_accuracy, update_op = metrics.mean_per_class_accuracy(
labels, predictions, num_classes)
sess.run(variables.local_variables_initializer())
self.assertAllEqual([[0, 0], [40, 0]], update_op.eval())
self.assertEqual(0., mean_accuracy.eval())
def testResultsWithSomeMissing(self):
predictions = array_ops.concat([
constant_op.constant(0, shape=[5]), constant_op.constant(1, shape=[5])
], 0)
labels = array_ops.concat([
constant_op.constant(0, shape=[3]), constant_op.constant(1, shape=[7])
], 0)
num_classes = 2
weights = array_ops.concat([
constant_op.constant(0, shape=[1]), constant_op.constant(1, shape=[8]),
constant_op.constant(0, shape=[1])
], 0)
with self.test_session() as sess:
mean_accuracy, update_op = metrics.mean_per_class_accuracy(
labels, predictions, num_classes, weights=weights)
sess.run(variables.local_variables_initializer())
self.assertAllEqual([[2, 0], [2, 4]], update_op.eval())
desired_mean_accuracy = np.mean([2. / 2., 4. / 6.])
self.assertAlmostEqual(desired_mean_accuracy, mean_accuracy.eval())
if __name__ == '__main__':
test.main()
| apache-2.0 |
jupierce/openshift-tools | ansible/roles/oso_zagg_deploy/filter_plugins/filters.py | 25 | 2619 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# vim: expandtab:tabstop=4:shiftwidth=4
'''
Custom filters for use in openshift_sso_app
'''
class FilterModule(object):
''' Custom ansible filters '''
@staticmethod
def get_running_pods(podlist_results, pod_names_to_match):
''' This is a filter to see which pods in a project are running
            This filter takes the pod list results and a list of deploymentconfig
            names to match, and returns the deploymentconfig name of every
            matching pod that is in the Running phase.
Example:
given this:
podlist_results:
results:
- items:
- status:
phase: Running
metadata:
labels:
deploymentconfig: oso-memcached-sso1
- status:
phase: Terminated
metadata:
labels:
deploymentconfig: oso-memcached-sso2
- status:
phase: Running
metadata:
labels:
deploymentconfig: oso-saml-sso
- status:
phase: Running
metadata:
labels:
deploymentconfig: oso-saml-sso
Then
            {{ podlist_results | get_running_pods(['oso-memcached-sso1', 'oso-memcached-sso2', 'oso-saml-sso']) }}
gives an output of ['oso-memcached-sso1', 'oso-saml-sso', 'oso-saml-sso']
'''
rval = []
if 'results' not in podlist_results:
return rval
if len(podlist_results['results']) == 0:
return rval
if 'items' not in podlist_results['results'][0]:
return rval
for pod in podlist_results['results'][0]['items']:
if 'status' not in pod:
continue
if 'phase' not in pod['status']:
continue
if pod['status']['phase'] != 'Running':
continue
if 'metadata' not in pod or 'labels' not in pod['metadata']:
continue
if 'deploymentconfig' not in pod['metadata']['labels']:
continue
if pod['metadata']['labels']['deploymentconfig'] in pod_names_to_match:
rval.append(pod['metadata']['labels']['deploymentconfig'])
return rval
def filters(self):
''' returns a mapping of filters to methods '''
return {
"get_running_pods": self.get_running_pods,
}
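# Minimal usage sketch (not part of the original role): exercising the filter
# directly in Python. The pod structure below is a reduced, assumed example of
# the query results described in the docstring above.
if __name__ == '__main__':
    sample_podlist = {'results': [{'items': [
        {'status': {'phase': 'Running'},
         'metadata': {'labels': {'deploymentconfig': 'oso-saml-sso'}}},
        {'status': {'phase': 'Terminated'},
         'metadata': {'labels': {'deploymentconfig': 'oso-memcached-sso1'}}},
    ]}]}
    # Only the running pod's deploymentconfig should be returned.
    print(FilterModule.get_running_pods(sample_podlist,
                                        ['oso-saml-sso', 'oso-memcached-sso1']))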
| apache-2.0 |
deepcell/xhtml2pdf | xhtml2pdf/document.py | 37 | 6381 | # -*- coding: utf-8 -*-
from xhtml2pdf.context import pisaContext
from xhtml2pdf.default import DEFAULT_CSS
from xhtml2pdf.parser import pisaParser
from reportlab.platypus.flowables import Spacer
from reportlab.platypus.frames import Frame
from xhtml2pdf.xhtml2pdf_reportlab import PmlBaseDoc, PmlPageTemplate
from xhtml2pdf.util import pisaTempFile, getBox, pyPdf
import cgi
import logging
# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log = logging.getLogger("xhtml2pdf")
def pisaErrorDocument(dest, c):
out = pisaTempFile(capacity=c.capacity)
out.write("<p style='background-color:red;'><strong>%d error(s) occured:</strong><p>" % c.err)
for mode, line, msg, _ in c.log:
if mode=="error":
out.write("<pre>%s in line %d: %s</pre>" % (mode, line, cgi.escape(msg)))
out.write("<p><strong>%d warning(s) occured:</strong><p>" % c.warn)
for mode, line, msg, _ in c.log:
if mode=="warning":
out.write("<p>%s in line %d: %s</p>" % (mode, line, cgi.escape(msg)))
return pisaDocument(out.getvalue(), dest, raise_exception=False)
def pisaStory(src, path=None, link_callback=None, debug=0, default_css=None,
xhtml=False, encoding=None, context=None, xml_output=None,
**kw):
# Prepare Context
if not context:
context = pisaContext(path, debug=debug)
context.pathCallback = link_callback
# Use a default set of CSS definitions to get an expected output
if default_css is None:
default_css = DEFAULT_CSS
# Parse and fill the story
pisaParser(src, context, default_css, xhtml, encoding, xml_output)
# Avoid empty documents
if not context.story:
context.story = [Spacer(1,1)]
if context.indexing_story:
context.story.append(context.indexing_story)
# Remove anchors if they do not exist (because of a bug in Reportlab)
for frag, anchor in context.anchorFrag:
if anchor not in context.anchorName:
frag.link = None
return context
def pisaDocument(src, dest=None, path=None, link_callback=None, debug=0,
default_css=None, xhtml=False, encoding=None, xml_output=None,
raise_exception=True, capacity=100*1024, **kw):
log.debug("pisaDocument options:\n src = %r\n dest = %r\n path = %r\n link_callback = %r\n xhtml = %r",
src,
dest,
path,
link_callback,
xhtml)
# Prepare simple context
context = pisaContext(path, debug=debug, capacity=capacity)
context.pathCallback = link_callback
# Build story
context = pisaStory(src, path, link_callback, debug, default_css, xhtml,
encoding, context=context, xml_output=xml_output)
# Buffer PDF into memory
out = pisaTempFile(capacity=context.capacity)
doc = PmlBaseDoc(
out,
pagesize=context.pageSize,
author=context.meta["author"].strip(),
subject=context.meta["subject"].strip(),
keywords=[x.strip() for x in
context.meta["keywords"].strip().split(",") if x],
title=context.meta["title"].strip(),
showBoundary=0,
allowSplitting=1)
# Prepare templates and their frames
if "body" in context.templateList:
body = context.templateList["body"]
del context.templateList["body"]
else:
x, y, w, h = getBox("1cm 1cm -1cm -1cm", context.pageSize)
body = PmlPageTemplate(
id="body",
frames=[
Frame(x, y, w, h,
id="body",
leftPadding=0,
rightPadding=0,
bottomPadding=0,
topPadding=0)],
pagesize = context.pageSize)
doc.addPageTemplates([body] + context.templateList.values())
# Use multibuild e.g. if a TOC has to be created
if context.multiBuild:
doc.multiBuild(context.story)
else:
doc.build(context.story)
# Add watermarks
if pyPdf:
for bgouter in context.pisaBackgroundList:
# If we have at least one background, then lets do it
if bgouter:
istream = out
output = pyPdf.PdfFileWriter()
input1 = pyPdf.PdfFileReader(istream)
ctr = 0
# TODO: Why do we loop over the same list again?
# see bgouter at line 137
for bg in context.pisaBackgroundList:
page = input1.getPage(ctr)
if (bg and not bg.notFound()
and (bg.mimetype=="application/pdf")):
bginput = pyPdf.PdfFileReader(bg.getFile())
pagebg = bginput.getPage(0)
pagebg.mergePage(page)
page = pagebg
else:
log.warn(context.warning(
"Background PDF %s doesn't exist.", bg))
output.addPage(page)
ctr += 1
out = pisaTempFile(capacity=context.capacity)
output.write(out)
# data = sout.getvalue()
                # Found a background? So leave loop after first occurrence
break
else:
log.warn(context.warning("pyPDF not installed!"))
# Get the resulting PDF and write it to the file object
# passed from the caller
if dest is None:
# No output file was passed - Let's use a pisaTempFile
dest = pisaTempFile(capacity=context.capacity)
context.dest = dest
data = out.getvalue() # TODO: That load all the tempfile in RAM - Why bother with a swapping tempfile then?
context.dest.write(data) # TODO: context.dest is a tempfile as well...
return context
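# Minimal usage sketch (not part of the original module; the output file name
# is an assumption for illustration only): render an HTML string into a PDF
# file with pisaDocument and check the error counter on the returned context.
if __name__ == "__main__":
    with open("example_output.pdf", "wb") as result_file:
        sample_context = pisaDocument("<h1>Hello xhtml2pdf</h1>", dest=result_file)
        if sample_context.err:
            log.warn("Sample conversion reported %d error(s)", sample_context.err)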
| apache-2.0 |
Skyeouyang/Text-Analytics-Project | lexicon analysis.py | 1 | 2398 | #######################################
##Author Skye Ouyang
##Date 19th Apr.
#######################################
import glob
import os
def IsNotNull(value):
return value is not None and len(value) > 0
#create weapon list
dict_weapon = []
weapons = open('D:/1. msba/Trimester II Jan.2017-May.2017/text analytics/project/lexicon/weapon_words.txt','r')
for weapon in weapons:
t = weapon.strip().lower()
if (IsNotNull(t)):
dict_weapon.append(t)
weapons.close()
#create bloody words list
dict_bloody = []
bloodys = open('D:/1. msba/Trimester II Jan.2017-May.2017/text analytics/project/lexicon/bloody_words.txt','r')
for bloody in bloodys:
b = bloody.strip().lower()
if (IsNotNull(b)):
dict_bloody.append(b)
#create mysterious words list
dict_mysterious = []
mysteriouss = open('D:/1. msba/Trimester II Jan.2017-May.2017/text analytics/project/lexicon/mysterious_words.txt','r')
for mysterious in mysteriouss:
m = mysterious.strip().lower()
if (IsNotNull(m)):
dict_mysterious.append(m)
#input data
path ="D:/1. msba/Trimester II Jan.2017-May.2017/text analytics/project/dataset/low_score_novel"
allFiles = glob.glob(path + "/*.txt")
#file = open('D:/1. msba/Trimester II Jan.2017-May.2017/text analytics/project/dataset/high_score_novel/01. The Girl with the Dragon Tattoo.txt','r')
weapon_cnt = []
bloody_cnt = []
mysterious_cnt = []
for file in allFiles:
with open(file) as fle:
fiction = fle.read()
# set for loop
wea_cnt = 0
blo_cnt = 0
mys_cnt = 0
# count of weapon words
for word in dict_weapon:
if (word in fiction):
wea_cnt = wea_cnt + 1
for word in dict_bloody:
if (word in fiction):
blo_cnt = blo_cnt + 1
for word in dict_mysterious:
if (word in fiction):
mys_cnt = mys_cnt + 1
print (wea_cnt, blo_cnt , mys_cnt)
# write into list
weapon_cnt.append(wea_cnt)
bloody_cnt.append(blo_cnt)
mysterious_cnt.append(mys_cnt)
weapon_cnt
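# Sketch (not part of the original script): pair each novel with its three
# lexicon hit counts so the parallel lists built above are easier to inspect.
for novel_path, wea, blo, mys in zip(allFiles, weapon_cnt, bloody_cnt, mysterious_cnt):
    print (os.path.basename(novel_path), wea, blo, mys)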
'''
for file in allFiles:
with open (file) as fle:
blo_cnt = 0
fiction = fle.read()
'''
#file_name = os.path.splitext(path + '/*.txt')[0]
#print ('The size of %s is ' % (file_name) + str(len(fiction)))
| apache-2.0 |
ueshin/apache-spark | python/pyspark/sql/context.py | 15 | 23877 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import warnings
from pyspark import since, _NoValue
from pyspark.sql.session import _monkey_patch_RDD, SparkSession
from pyspark.sql.dataframe import DataFrame
from pyspark.sql.readwriter import DataFrameReader
from pyspark.sql.streaming import DataStreamReader
from pyspark.sql.udf import UDFRegistration # noqa: F401
from pyspark.sql.utils import install_exception_handler
__all__ = ["SQLContext", "HiveContext"]
class SQLContext(object):
"""The entry point for working with structured data (rows and columns) in Spark, in Spark 1.x.
As of Spark 2.0, this is replaced by :class:`SparkSession`. However, we are keeping the class
here for backward compatibility.
A SQLContext can be used create :class:`DataFrame`, register :class:`DataFrame` as
tables, execute SQL over tables, cache tables, and read parquet files.
.. deprecated:: 3.0.0
Use :func:`SparkSession.builder.getOrCreate()` instead.
Parameters
----------
sparkContext : :class:`SparkContext`
The :class:`SparkContext` backing this SQLContext.
sparkSession : :class:`SparkSession`
The :class:`SparkSession` around which this SQLContext wraps.
jsqlContext : optional
An optional JVM Scala SQLContext. If set, we do not instantiate a new
SQLContext in the JVM, instead we make all calls to this object.
This is only for internal.
Examples
--------
>>> from datetime import datetime
>>> from pyspark.sql import Row
>>> sqlContext = SQLContext(sc)
>>> allTypes = sc.parallelize([Row(i=1, s="string", d=1.0, l=1,
... b=True, list=[1, 2, 3], dict={"s": 0}, row=Row(a=1),
... time=datetime(2014, 8, 1, 14, 1, 5))])
>>> df = allTypes.toDF()
>>> df.createOrReplaceTempView("allTypes")
>>> sqlContext.sql('select i+1, d+1, not b, list[1], dict["s"], time, row.a '
... 'from allTypes where b and i > 0').collect()
[Row((i + 1)=2, (d + 1)=2.0, (NOT b)=False, list[1]=2, \
dict[s]=0, time=datetime.datetime(2014, 8, 1, 14, 1, 5), a=1)]
>>> df.rdd.map(lambda x: (x.i, x.s, x.d, x.l, x.b, x.time, x.row.a, x.list)).collect()
[(1, 'string', 1.0, 1, True, datetime.datetime(2014, 8, 1, 14, 1, 5), 1, [1, 2, 3])]
"""
_instantiatedContext = None
def __init__(self, sparkContext, sparkSession=None, jsqlContext=None):
if sparkSession is None:
warnings.warn(
"Deprecated in 3.0.0. Use SparkSession.builder.getOrCreate() instead.",
FutureWarning
)
self._sc = sparkContext
self._jsc = self._sc._jsc
self._jvm = self._sc._jvm
if sparkSession is None:
sparkSession = SparkSession.builder.getOrCreate()
if jsqlContext is None:
jsqlContext = sparkSession._jwrapped
self.sparkSession = sparkSession
self._jsqlContext = jsqlContext
_monkey_patch_RDD(self.sparkSession)
install_exception_handler()
if (SQLContext._instantiatedContext is None
or SQLContext._instantiatedContext._sc._jsc is None):
SQLContext._instantiatedContext = self
@property
def _ssql_ctx(self):
"""Accessor for the JVM Spark SQL context.
Subclasses can override this property to provide their own
JVM Contexts.
"""
return self._jsqlContext
@property
def _conf(self):
"""Accessor for the JVM SQL-specific configurations"""
return self.sparkSession._jsparkSession.sessionState().conf()
@classmethod
def getOrCreate(cls, sc):
"""
Get the existing SQLContext or create a new one with given SparkContext.
.. versionadded:: 1.6.0
.. deprecated:: 3.0.0
Use :func:`SparkSession.builder.getOrCreate()` instead.
Parameters
----------
sc : :class:`SparkContext`
"""
warnings.warn(
"Deprecated in 3.0.0. Use SparkSession.builder.getOrCreate() instead.",
FutureWarning
)
if (cls._instantiatedContext is None
or SQLContext._instantiatedContext._sc._jsc is None):
jsqlContext = sc._jvm.SparkSession.builder().sparkContext(
sc._jsc.sc()).getOrCreate().sqlContext()
sparkSession = SparkSession(sc, jsqlContext.sparkSession())
cls(sc, sparkSession, jsqlContext)
return cls._instantiatedContext
def newSession(self):
"""
Returns a new SQLContext as new session, that has separate SQLConf,
registered temporary views and UDFs, but shared SparkContext and
table cache.
.. versionadded:: 1.6.0
"""
return self.__class__(self._sc, self.sparkSession.newSession())
def setConf(self, key, value):
"""Sets the given Spark SQL configuration property.
.. versionadded:: 1.3.0
"""
self.sparkSession.conf.set(key, value)
def getConf(self, key, defaultValue=_NoValue):
"""Returns the value of Spark SQL configuration property for the given key.
If the key is not set and defaultValue is set, return
defaultValue. If the key is not set and defaultValue is not set, return
the system default value.
.. versionadded:: 1.3.0
Examples
--------
>>> sqlContext.getConf("spark.sql.shuffle.partitions")
'200'
>>> sqlContext.getConf("spark.sql.shuffle.partitions", "10")
'10'
>>> sqlContext.setConf("spark.sql.shuffle.partitions", "50")
>>> sqlContext.getConf("spark.sql.shuffle.partitions", "10")
'50'
"""
return self.sparkSession.conf.get(key, defaultValue)
@property
def udf(self):
"""Returns a :class:`UDFRegistration` for UDF registration.
.. versionadded:: 1.3.1
Returns
-------
:class:`UDFRegistration`
"""
return self.sparkSession.udf
def range(self, start, end=None, step=1, numPartitions=None):
"""
Create a :class:`DataFrame` with single :class:`pyspark.sql.types.LongType` column named
``id``, containing elements in a range from ``start`` to ``end`` (exclusive) with
step value ``step``.
.. versionadded:: 1.4.0
Parameters
----------
start : int
the start value
end : int, optional
the end value (exclusive)
step : int, optional
the incremental step (default: 1)
numPartitions : int, optional
the number of partitions of the DataFrame
Returns
-------
:class:`DataFrame`
Examples
--------
>>> sqlContext.range(1, 7, 2).collect()
[Row(id=1), Row(id=3), Row(id=5)]
If only one argument is specified, it will be used as the end value.
>>> sqlContext.range(3).collect()
[Row(id=0), Row(id=1), Row(id=2)]
"""
return self.sparkSession.range(start, end, step, numPartitions)
def registerFunction(self, name, f, returnType=None):
"""An alias for :func:`spark.udf.register`.
See :meth:`pyspark.sql.UDFRegistration.register`.
.. versionadded:: 1.2.0
.. deprecated:: 2.3.0
Use :func:`spark.udf.register` instead.
"""
warnings.warn(
"Deprecated in 2.3.0. Use spark.udf.register instead.",
FutureWarning
)
return self.sparkSession.udf.register(name, f, returnType)
def registerJavaFunction(self, name, javaClassName, returnType=None):
"""An alias for :func:`spark.udf.registerJavaFunction`.
See :meth:`pyspark.sql.UDFRegistration.registerJavaFunction`.
.. versionadded:: 2.1.0
.. deprecated:: 2.3.0
Use :func:`spark.udf.registerJavaFunction` instead.
"""
warnings.warn(
"Deprecated in 2.3.0. Use spark.udf.registerJavaFunction instead.",
FutureWarning
)
return self.sparkSession.udf.registerJavaFunction(name, javaClassName, returnType)
# TODO(andrew): delete this once we refactor things to take in SparkSession
def _inferSchema(self, rdd, samplingRatio=None):
"""
Infer schema from an RDD of Row or tuple.
Parameters
----------
rdd : :class:`RDD`
an RDD of Row or tuple
samplingRatio : float, optional
sampling ratio, or no sampling (default)
Returns
-------
:class:`pyspark.sql.types.StructType`
"""
return self.sparkSession._inferSchema(rdd, samplingRatio)
def createDataFrame(self, data, schema=None, samplingRatio=None, verifySchema=True):
"""
Creates a :class:`DataFrame` from an :class:`RDD`, a list or a :class:`pandas.DataFrame`.
When ``schema`` is a list of column names, the type of each column
will be inferred from ``data``.
When ``schema`` is ``None``, it will try to infer the schema (column names and types)
from ``data``, which should be an RDD of :class:`Row`,
or :class:`namedtuple`, or :class:`dict`.
When ``schema`` is :class:`pyspark.sql.types.DataType` or a datatype string it must match
the real data, or an exception will be thrown at runtime. If the given schema is not
:class:`pyspark.sql.types.StructType`, it will be wrapped into a
:class:`pyspark.sql.types.StructType` as its only field, and the field name will be "value",
each record will also be wrapped into a tuple, which can be converted to row later.
        If schema inference is needed, ``samplingRatio`` is used to determine the ratio of
rows used for schema inference. The first row will be used if ``samplingRatio`` is ``None``.
.. versionadded:: 1.3.0
.. versionchanged:: 2.0.0
The ``schema`` parameter can be a :class:`pyspark.sql.types.DataType` or a
datatype string after 2.0.
If it's not a :class:`pyspark.sql.types.StructType`, it will be wrapped into a
:class:`pyspark.sql.types.StructType` and each record will also be wrapped into a tuple.
.. versionchanged:: 2.1.0
Added verifySchema.
Parameters
----------
data : :class:`RDD` or iterable
an RDD of any kind of SQL data representation (:class:`Row`,
:class:`tuple`, ``int``, ``boolean``, etc.), or :class:`list`, or
:class:`pandas.DataFrame`.
schema : :class:`pyspark.sql.types.DataType`, str or list, optional
a :class:`pyspark.sql.types.DataType` or a datatype string or a list of
column names, default is None. The data type string format equals to
:class:`pyspark.sql.types.DataType.simpleString`, except that top level struct type can
omit the ``struct<>`` and atomic types use ``typeName()`` as their format, e.g. use
``byte`` instead of ``tinyint`` for :class:`pyspark.sql.types.ByteType`.
We can also use ``int`` as a short name for :class:`pyspark.sql.types.IntegerType`.
samplingRatio : float, optional
the sample ratio of rows used for inferring
verifySchema : bool, optional
verify data types of every row against schema. Enabled by default.
Returns
-------
:class:`DataFrame`
Examples
--------
>>> l = [('Alice', 1)]
>>> sqlContext.createDataFrame(l).collect()
[Row(_1='Alice', _2=1)]
>>> sqlContext.createDataFrame(l, ['name', 'age']).collect()
[Row(name='Alice', age=1)]
>>> d = [{'name': 'Alice', 'age': 1}]
>>> sqlContext.createDataFrame(d).collect()
[Row(age=1, name='Alice')]
>>> rdd = sc.parallelize(l)
>>> sqlContext.createDataFrame(rdd).collect()
[Row(_1='Alice', _2=1)]
>>> df = sqlContext.createDataFrame(rdd, ['name', 'age'])
>>> df.collect()
[Row(name='Alice', age=1)]
>>> from pyspark.sql import Row
>>> Person = Row('name', 'age')
>>> person = rdd.map(lambda r: Person(*r))
>>> df2 = sqlContext.createDataFrame(person)
>>> df2.collect()
[Row(name='Alice', age=1)]
>>> from pyspark.sql.types import *
>>> schema = StructType([
... StructField("name", StringType(), True),
... StructField("age", IntegerType(), True)])
>>> df3 = sqlContext.createDataFrame(rdd, schema)
>>> df3.collect()
[Row(name='Alice', age=1)]
>>> sqlContext.createDataFrame(df.toPandas()).collect() # doctest: +SKIP
[Row(name='Alice', age=1)]
>>> sqlContext.createDataFrame(pandas.DataFrame([[1, 2]])).collect() # doctest: +SKIP
[Row(0=1, 1=2)]
>>> sqlContext.createDataFrame(rdd, "a: string, b: int").collect()
[Row(a='Alice', b=1)]
>>> rdd = rdd.map(lambda row: row[1])
>>> sqlContext.createDataFrame(rdd, "int").collect()
[Row(value=1)]
>>> sqlContext.createDataFrame(rdd, "boolean").collect() # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
Py4JJavaError: ...
"""
return self.sparkSession.createDataFrame(data, schema, samplingRatio, verifySchema)
def registerDataFrameAsTable(self, df, tableName):
"""Registers the given :class:`DataFrame` as a temporary table in the catalog.
Temporary tables exist only during the lifetime of this instance of :class:`SQLContext`.
.. versionadded:: 1.3.0
Examples
--------
>>> sqlContext.registerDataFrameAsTable(df, "table1")
"""
df.createOrReplaceTempView(tableName)
def dropTempTable(self, tableName):
""" Remove the temporary table from catalog.
.. versionadded:: 1.6.0
Examples
--------
>>> sqlContext.registerDataFrameAsTable(df, "table1")
>>> sqlContext.dropTempTable("table1")
"""
self.sparkSession.catalog.dropTempView(tableName)
def createExternalTable(self, tableName, path=None, source=None, schema=None, **options):
"""Creates an external table based on the dataset in a data source.
It returns the DataFrame associated with the external table.
The data source is specified by the ``source`` and a set of ``options``.
If ``source`` is not specified, the default data source configured by
``spark.sql.sources.default`` will be used.
Optionally, a schema can be provided as the schema of the returned :class:`DataFrame` and
created external table.
.. versionadded:: 1.3.0
Returns
-------
:class:`DataFrame`
"""
return self.sparkSession.catalog.createExternalTable(
tableName, path, source, schema, **options)
def sql(self, sqlQuery):
"""Returns a :class:`DataFrame` representing the result of the given query.
.. versionadded:: 1.0.0
Returns
-------
:class:`DataFrame`
Examples
--------
>>> sqlContext.registerDataFrameAsTable(df, "table1")
>>> df2 = sqlContext.sql("SELECT field1 AS f1, field2 as f2 from table1")
>>> df2.collect()
[Row(f1=1, f2='row1'), Row(f1=2, f2='row2'), Row(f1=3, f2='row3')]
"""
return self.sparkSession.sql(sqlQuery)
def table(self, tableName):
"""Returns the specified table or view as a :class:`DataFrame`.
.. versionadded:: 1.0.0
Returns
-------
:class:`DataFrame`
Examples
--------
>>> sqlContext.registerDataFrameAsTable(df, "table1")
>>> df2 = sqlContext.table("table1")
>>> sorted(df.collect()) == sorted(df2.collect())
True
"""
return self.sparkSession.table(tableName)
def tables(self, dbName=None):
"""Returns a :class:`DataFrame` containing names of tables in the given database.
If ``dbName`` is not specified, the current database will be used.
The returned DataFrame has two columns: ``tableName`` and ``isTemporary``
(a column with :class:`BooleanType` indicating if a table is a temporary one or not).
.. versionadded:: 1.3.0
Parameters
----------
dbName: str, optional
name of the database to use.
Returns
-------
:class:`DataFrame`
Examples
--------
>>> sqlContext.registerDataFrameAsTable(df, "table1")
>>> df2 = sqlContext.tables()
>>> df2.filter("tableName = 'table1'").first()
Row(namespace='', tableName='table1', isTemporary=True)
"""
if dbName is None:
return DataFrame(self._ssql_ctx.tables(), self)
else:
return DataFrame(self._ssql_ctx.tables(dbName), self)
def tableNames(self, dbName=None):
"""Returns a list of names of tables in the database ``dbName``.
.. versionadded:: 1.3.0
Parameters
----------
dbName: str
name of the database to use. Default to the current database.
Returns
-------
list
list of table names, in string
>>> sqlContext.registerDataFrameAsTable(df, "table1")
>>> "table1" in sqlContext.tableNames()
True
>>> "table1" in sqlContext.tableNames("default")
True
"""
if dbName is None:
return [name for name in self._ssql_ctx.tableNames()]
else:
return [name for name in self._ssql_ctx.tableNames(dbName)]
@since(1.0)
def cacheTable(self, tableName):
"""Caches the specified table in-memory."""
self._ssql_ctx.cacheTable(tableName)
@since(1.0)
def uncacheTable(self, tableName):
"""Removes the specified table from the in-memory cache."""
self._ssql_ctx.uncacheTable(tableName)
@since(1.3)
def clearCache(self):
"""Removes all cached tables from the in-memory cache. """
self._ssql_ctx.clearCache()
@property
def read(self):
"""
Returns a :class:`DataFrameReader` that can be used to read data
in as a :class:`DataFrame`.
.. versionadded:: 1.4.0
Returns
-------
:class:`DataFrameReader`
"""
return DataFrameReader(self)
@property
def readStream(self):
"""
Returns a :class:`DataStreamReader` that can be used to read data streams
as a streaming :class:`DataFrame`.
.. versionadded:: 2.0.0
Notes
-----
This API is evolving.
Returns
-------
:class:`DataStreamReader`
>>> text_sdf = sqlContext.readStream.text(tempfile.mkdtemp())
>>> text_sdf.isStreaming
True
"""
return DataStreamReader(self)
@property
def streams(self):
"""Returns a :class:`StreamingQueryManager` that allows managing all the
:class:`StreamingQuery` StreamingQueries active on `this` context.
.. versionadded:: 2.0.0
Notes
-----
This API is evolving.
"""
from pyspark.sql.streaming import StreamingQueryManager
return StreamingQueryManager(self._ssql_ctx.streams())
class HiveContext(SQLContext):
"""A variant of Spark SQL that integrates with data stored in Hive.
Configuration for Hive is read from ``hive-site.xml`` on the classpath.
It supports running both SQL and HiveQL commands.
.. deprecated:: 2.0.0
Use SparkSession.builder.enableHiveSupport().getOrCreate().
Parameters
----------
sparkContext : :class:`SparkContext`
The SparkContext to wrap.
jhiveContext : optional
An optional JVM Scala HiveContext. If set, we do not instantiate a new
:class:`HiveContext` in the JVM, instead we make all calls to this object.
This is only for internal use.
"""
def __init__(self, sparkContext, jhiveContext=None):
warnings.warn(
"HiveContext is deprecated in Spark 2.0.0. Please use " +
"SparkSession.builder.enableHiveSupport().getOrCreate() instead.",
FutureWarning
)
if jhiveContext is None:
sparkContext._conf.set("spark.sql.catalogImplementation", "hive")
sparkSession = SparkSession.builder._sparkContext(sparkContext).getOrCreate()
else:
sparkSession = SparkSession(sparkContext, jhiveContext.sparkSession())
SQLContext.__init__(self, sparkContext, sparkSession, jhiveContext)
@classmethod
def _createForTesting(cls, sparkContext):
"""(Internal use only) Create a new HiveContext for testing.
All test code that touches HiveContext *must* go through this method. Otherwise,
        you may end up launching multiple derby instances and encounter incredibly
confusing error messages.
"""
jsc = sparkContext._jsc.sc()
jtestHive = sparkContext._jvm.org.apache.spark.sql.hive.test.TestHiveContext(jsc, False)
return cls(sparkContext, jtestHive)
def refreshTable(self, tableName):
"""Invalidate and refresh all the cached the metadata of the given
table. For performance reasons, Spark SQL or the external data source
library it uses might cache certain metadata about a table, such as the
location of blocks. When those change outside of Spark SQL, users should
call this function to invalidate the cache.
"""
self._ssql_ctx.refreshTable(tableName)
def _test():
import os
import doctest
import tempfile
from pyspark.context import SparkContext
from pyspark.sql import Row, SQLContext
import pyspark.sql.context
os.chdir(os.environ["SPARK_HOME"])
globs = pyspark.sql.context.__dict__.copy()
sc = SparkContext('local[4]', 'PythonTest')
globs['tempfile'] = tempfile
globs['os'] = os
globs['sc'] = sc
globs['sqlContext'] = SQLContext(sc)
globs['rdd'] = rdd = sc.parallelize(
[Row(field1=1, field2="row1"),
Row(field1=2, field2="row2"),
Row(field1=3, field2="row3")]
)
globs['df'] = rdd.toDF()
jsonStrings = [
'{"field1": 1, "field2": "row1", "field3":{"field4":11}}',
'{"field1" : 2, "field3":{"field4":22, "field5": [10, 11]},"field6":[{"field7": "row2"}]}',
'{"field1" : null, "field2": "row3", "field3":{"field4":33, "field5": []}}'
]
globs['jsonStrings'] = jsonStrings
globs['json'] = sc.parallelize(jsonStrings)
(failure_count, test_count) = doctest.testmod(
pyspark.sql.context, globs=globs,
optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
globs['sc'].stop()
if failure_count:
sys.exit(-1)
if __name__ == "__main__":
_test()
| apache-2.0 |
onceuponatimeforever/oh-mainline | vendor/packages/gdata/samples/contentforshopping/add_product.py | 32 | 1571 | #!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import getpass
from gdata.contentforshopping.data import build_entry
from gdata.contentforshopping.client import ContentForShoppingClient
# Gather merchant information
account_id = raw_input('Merchant Account ID? ').strip()
email = raw_input('Google Email Address? ').strip()
# Create a client
client = ContentForShoppingClient(account_id)
# Perform programmatic login
client.client_login(email, getpass.getpass('Google Password? '),
'Shopping API for Content sample', 'structuredcontent')
# Generate a product entry
product_entry = build_entry(
product_id='ipod2',
target_country = 'US',
content_language = 'EN',
title='iPod Nano 8GB',
content='A nice small mp3 player',
price='149',
price_unit='USD',
shipping_price = '5',
shipping_price_unit = 'USD',
tax_rate='17.5',
condition = 'new',
link = 'http://pseudoscience.co.uk/google4e823e35f032f011.html',
)
# Post it to the service
client.insert_product(product_entry)
| agpl-3.0 |
NeovaHealth/odoo | addons/marketing_campaign/marketing_campaign.py | 51 | 41800 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2013 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
import base64
import itertools
from datetime import datetime
from dateutil.relativedelta import relativedelta
from operator import itemgetter
from traceback import format_exception
from sys import exc_info
from openerp.tools.safe_eval import safe_eval as eval
import re
from openerp.addons.decimal_precision import decimal_precision as dp
from openerp import api
from openerp.osv import fields, osv
from openerp.report import render_report
from openerp.tools.translate import _
_intervalTypes = {
'hours': lambda interval: relativedelta(hours=interval),
'days': lambda interval: relativedelta(days=interval),
'months': lambda interval: relativedelta(months=interval),
'years': lambda interval: relativedelta(years=interval),
}
DT_FMT = '%Y-%m-%d %H:%M:%S'
class marketing_campaign(osv.osv):
_name = "marketing.campaign"
_description = "Marketing Campaign"
def _count_segments(self, cr, uid, ids, field_name, arg, context=None):
res = {}
try:
for segments in self.browse(cr, uid, ids, context=context):
res[segments.id] = len(segments.segment_ids)
except:
pass
return res
_columns = {
'name': fields.char('Name', required=True),
'object_id': fields.many2one('ir.model', 'Resource', required=True,
help="Choose the resource on which you want \
this campaign to be run"),
'partner_field_id': fields.many2one('ir.model.fields', 'Partner Field',
domain="[('model_id', '=', object_id), ('ttype', '=', 'many2one'), ('relation', '=', 'res.partner')]",
help="The generated workitems will be linked to the partner related to the record. "\
"If the record is the partner itself leave this field empty. "\
"This is useful for reporting purposes, via the Campaign Analysis or Campaign Follow-up views."),
'unique_field_id': fields.many2one('ir.model.fields', 'Unique Field',
domain="[('model_id', '=', object_id), ('ttype', 'in', ['char','int','many2one','text','selection'])]",
help='If set, this field will help segments that work in "no duplicates" mode to avoid '\
'selecting similar records twice. Similar records are records that have the same value for '\
'this unique field. For example by choosing the "email_from" field for CRM Leads you would prevent '\
'sending the same campaign to the same email address again. If not set, the "no duplicates" segments '\
"will only avoid selecting the same record again if it entered the campaign previously. "\
"Only easily comparable fields like textfields, integers, selections or single relationships may be used."),
'mode': fields.selection([('test', 'Test Directly'),
('test_realtime', 'Test in Realtime'),
('manual', 'With Manual Confirmation'),
('active', 'Normal')],
'Mode', required=True, help= \
"""Test - It creates and process all the activities directly (without waiting for the delay on transitions) but does not send emails or produce reports.
Test in Realtime - It creates and processes all the activities directly but does not send emails or produce reports.
With Manual Confirmation - the campaign runs normally, but the user has to validate all workitems manually.
Normal - the campaign runs normally and automatically sends all emails and reports (be very careful with this mode, you're live!)"""),
'state': fields.selection([('draft', 'New'),
('running', 'Running'),
('cancelled', 'Cancelled'),
('done', 'Done')],
'Status', copy=False),
'activity_ids': fields.one2many('marketing.campaign.activity',
'campaign_id', 'Activities'),
'fixed_cost': fields.float('Fixed Cost', help="Fixed cost for running this campaign. You may also specify variable cost and revenue on each campaign activity. Cost and Revenue statistics are included in Campaign Reporting.", digits_compute=dp.get_precision('Product Price')),
'segment_ids': fields.one2many('marketing.campaign.segment', 'campaign_id', 'Segments', readonly=False),
'segments_count': fields.function(_count_segments, type='integer', string='Segments')
}
_defaults = {
'state': lambda *a: 'draft',
'mode': lambda *a: 'test',
}
def state_running_set(self, cr, uid, ids, *args):
# TODO check that all subcampaigns are running
campaign = self.browse(cr, uid, ids[0])
if not campaign.activity_ids:
raise osv.except_osv(_("Error"), _("The campaign cannot be started. There are no activities in it."))
has_start = False
has_signal_without_from = False
for activity in campaign.activity_ids:
if activity.start:
has_start = True
if activity.signal and len(activity.from_ids) == 0:
has_signal_without_from = True
if not has_start and not has_signal_without_from:
raise osv.except_osv(_("Error"), _("The campaign cannot be started. It does not have any starting activity. Modify campaign's activities to mark one as the starting point."))
return self.write(cr, uid, ids, {'state': 'running'})
def state_done_set(self, cr, uid, ids, *args):
# TODO check that this campaign is not a subcampaign in running mode.
segment_ids = self.pool.get('marketing.campaign.segment').search(cr, uid,
[('campaign_id', 'in', ids),
('state', '=', 'running')])
if segment_ids :
raise osv.except_osv(_("Error"), _("The campaign cannot be marked as done before all segments are closed."))
self.write(cr, uid, ids, {'state': 'done'})
return True
def state_cancel_set(self, cr, uid, ids, *args):
# TODO check that this campaign is not a subcampaign in running mode.
self.write(cr, uid, ids, {'state': 'cancelled'})
return True
# dead code
def signal(self, cr, uid, model, res_id, signal, run_existing=True, context=None):
record = self.pool[model].browse(cr, uid, res_id, context)
return self._signal(cr, uid, record, signal, run_existing, context)
#dead code
def _signal(self, cr, uid, record, signal, run_existing=True, context=None):
if not signal:
raise ValueError('Signal cannot be False.')
Workitems = self.pool.get('marketing.campaign.workitem')
domain = [('object_id.model', '=', record._name),
('state', '=', 'running')]
campaign_ids = self.search(cr, uid, domain, context=context)
for campaign in self.browse(cr, uid, campaign_ids, context=context):
for activity in campaign.activity_ids:
if activity.signal != signal:
continue
data = dict(activity_id=activity.id,
res_id=record.id,
state='todo')
wi_domain = [(k, '=', v) for k, v in data.items()]
wi_ids = Workitems.search(cr, uid, wi_domain, context=context)
if wi_ids:
if not run_existing:
continue
else:
partner = self._get_partner_for(campaign, record)
if partner:
data['partner_id'] = partner.id
wi_id = Workitems.create(cr, uid, data, context=context)
wi_ids = [wi_id]
Workitems.process(cr, uid, wi_ids, context=context)
return True
def _get_partner_for(self, campaign, record):
partner_field = campaign.partner_field_id.name
if partner_field:
return record[partner_field]
elif campaign.object_id.model == 'res.partner':
return record
return None
# prevent duplication until the server properly duplicates several levels of nested o2m
def copy(self, cr, uid, id, default=None, context=None):
raise osv.except_osv(_("Operation not supported"), _("You cannot duplicate a campaign, Not supported yet."))
def _find_duplicate_workitems(self, cr, uid, record, campaign_rec, context=None):
"""Finds possible duplicates workitems for a record in this campaign, based on a uniqueness
field.
:param record: browse_record to find duplicates workitems for.
:param campaign_rec: browse_record of campaign
"""
Workitems = self.pool.get('marketing.campaign.workitem')
duplicate_workitem_domain = [('res_id','=', record.id),
('campaign_id','=', campaign_rec.id)]
unique_field = campaign_rec.unique_field_id
if unique_field:
unique_value = getattr(record, unique_field.name, None)
if unique_value:
if unique_field.ttype == 'many2one':
unique_value = unique_value.id
similar_res_ids = self.pool[campaign_rec.object_id.model].search(cr, uid,
[(unique_field.name, '=', unique_value)], context=context)
if similar_res_ids:
duplicate_workitem_domain = [('res_id','in', similar_res_ids),
('campaign_id','=', campaign_rec.id)]
return Workitems.search(cr, uid, duplicate_workitem_domain, context=context)
class marketing_campaign_segment(osv.osv):
_name = "marketing.campaign.segment"
_description = "Campaign Segment"
_order = "name"
def _get_next_sync(self, cr, uid, ids, fn, args, context=None):
# next auto sync date is same for all segments
sync_job = self.pool.get('ir.model.data').get_object(cr, uid, 'marketing_campaign', 'ir_cron_marketing_campaign_every_day', context=context)
next_sync = sync_job and sync_job.nextcall or False
return dict.fromkeys(ids, next_sync)
_columns = {
'name': fields.char('Name', required=True),
'campaign_id': fields.many2one('marketing.campaign', 'Campaign', required=True, select=1, ondelete="cascade"),
'object_id': fields.related('campaign_id','object_id', type='many2one', relation='ir.model', string='Resource'),
'ir_filter_id': fields.many2one('ir.filters', 'Filter', ondelete="restrict",
help="Filter to select the matching resource records that belong to this segment. "\
"New filters can be created and saved using the advanced search on the list view of the Resource. "\
"If no filter is set, all records are selected without filtering. "\
"The synchronization mode may also add a criterion to the filter."),
'sync_last_date': fields.datetime('Last Synchronization', help="Date on which this segment was synchronized last time (automatically or manually)"),
'sync_mode': fields.selection([('create_date', 'Only records created after last sync'),
('write_date', 'Only records modified after last sync (no duplicates)'),
('all', 'All records (no duplicates)')],
'Synchronization mode',
help="Determines an additional criterion to add to the filter when selecting new records to inject in the campaign. "\
'"No duplicates" prevents selecting records which have already entered the campaign previously.'\
'If the campaign has a "unique field" set, "no duplicates" will also prevent selecting records which have '\
'the same value for the unique field as other records that already entered the campaign.'),
'state': fields.selection([('draft', 'New'),
('cancelled', 'Cancelled'),
('running', 'Running'),
('done', 'Done')],
'Status', copy=False),
'date_run': fields.datetime('Launch Date', help="Initial start date of this segment."),
'date_done': fields.datetime('End Date', help="Date this segment was last closed or cancelled."),
'date_next_sync': fields.function(_get_next_sync, string='Next Synchronization', type='datetime', help="Next time the synchronization job is scheduled to run automatically"),
}
_defaults = {
'state': lambda *a: 'draft',
'sync_mode': lambda *a: 'create_date',
}
def _check_model(self, cr, uid, ids, context=None):
for obj in self.browse(cr, uid, ids, context=context):
if not obj.ir_filter_id:
return True
if obj.campaign_id.object_id.model != obj.ir_filter_id.model_id:
return False
return True
_constraints = [
(_check_model, 'Model of filter must be same as resource model of Campaign ', ['ir_filter_id,campaign_id']),
]
def onchange_campaign_id(self, cr, uid, ids, campaign_id):
res = {'domain':{'ir_filter_id':[]}}
campaign_pool = self.pool.get('marketing.campaign')
if campaign_id:
campaign = campaign_pool.browse(cr, uid, campaign_id)
model_name = self.pool.get('ir.model').read(cr, uid, [campaign.object_id.id], ['model'])
if model_name:
mod_name = model_name[0]['model']
res['domain'] = {'ir_filter_id': [('model_id', '=', mod_name)]}
else:
res['value'] = {'ir_filter_id': False}
return res
def state_running_set(self, cr, uid, ids, *args):
segment = self.browse(cr, uid, ids[0])
vals = {'state': 'running'}
if not segment.date_run:
vals['date_run'] = time.strftime('%Y-%m-%d %H:%M:%S')
self.write(cr, uid, ids, vals)
return True
def state_done_set(self, cr, uid, ids, *args):
wi_ids = self.pool.get("marketing.campaign.workitem").search(cr, uid,
[('state', '=', 'todo'), ('segment_id', 'in', ids)])
self.pool.get("marketing.campaign.workitem").write(cr, uid, wi_ids, {'state':'cancelled'})
self.write(cr, uid, ids, {'state': 'done','date_done': time.strftime('%Y-%m-%d %H:%M:%S')})
return True
def state_cancel_set(self, cr, uid, ids, *args):
wi_ids = self.pool.get("marketing.campaign.workitem").search(cr, uid,
[('state', '=', 'todo'), ('segment_id', 'in', ids)])
self.pool.get("marketing.campaign.workitem").write(cr, uid, wi_ids, {'state':'cancelled'})
self.write(cr, uid, ids, {'state': 'cancelled','date_done': time.strftime('%Y-%m-%d %H:%M:%S')})
return True
def synchroniz(self, cr, uid, ids, *args):
self.process_segment(cr, uid, ids)
return True
@api.cr_uid_ids_context
def process_segment(self, cr, uid, segment_ids=None, context=None):
Workitems = self.pool.get('marketing.campaign.workitem')
Campaigns = self.pool.get('marketing.campaign')
if not segment_ids:
segment_ids = self.search(cr, uid, [('state', '=', 'running')], context=context)
action_date = time.strftime('%Y-%m-%d %H:%M:%S')
campaigns = set()
for segment in self.browse(cr, uid, segment_ids, context=context):
if segment.campaign_id.state != 'running':
continue
campaigns.add(segment.campaign_id.id)
act_ids = self.pool.get('marketing.campaign.activity').search(cr,
uid, [('start', '=', True), ('campaign_id', '=', segment.campaign_id.id)], context=context)
model_obj = self.pool[segment.object_id.model]
criteria = []
if segment.sync_last_date and segment.sync_mode != 'all':
criteria += [(segment.sync_mode, '>', segment.sync_last_date)]
if segment.ir_filter_id:
criteria += eval(segment.ir_filter_id.domain)
object_ids = model_obj.search(cr, uid, criteria, context=context)
# XXX TODO: rewrite this loop more efficiently without doing 1 search per record!
for record in model_obj.browse(cr, uid, object_ids, context=context):
# avoid duplicate workitem for the same resource
if segment.sync_mode in ('write_date','all'):
if Campaigns._find_duplicate_workitems(cr, uid, record, segment.campaign_id, context=context):
continue
wi_vals = {
'segment_id': segment.id,
'date': action_date,
'state': 'todo',
'res_id': record.id
}
partner = self.pool.get('marketing.campaign')._get_partner_for(segment.campaign_id, record)
if partner:
wi_vals['partner_id'] = partner.id
for act_id in act_ids:
wi_vals['activity_id'] = act_id
Workitems.create(cr, uid, wi_vals, context=context)
self.write(cr, uid, segment.id, {'sync_last_date':action_date}, context=context)
Workitems.process_all(cr, uid, list(campaigns), context=context)
return True
class marketing_campaign_activity(osv.osv):
_name = "marketing.campaign.activity"
_order = "name"
_description = "Campaign Activity"
_action_types = [
('email', 'Email'),
('report', 'Report'),
('action', 'Custom Action'),
# TODO implement the subcampaigns.
# TODO implement the subcampaign out. disallow out transitions from
# subcampaign activities ?
#('subcampaign', 'Sub-Campaign'),
]
_columns = {
'name': fields.char('Name', required=True),
'campaign_id': fields.many2one('marketing.campaign', 'Campaign',
required = True, ondelete='cascade', select=1),
'object_id': fields.related('campaign_id','object_id',
type='many2one', relation='ir.model',
string='Object', readonly=True),
'start': fields.boolean('Start', help= "This activity is launched when the campaign starts.", select=True),
'condition': fields.text('Condition', size=256, required=True,
help="Python expression to decide whether the activity can be executed, otherwise it will be deleted or cancelled."
"The expression may use the following [browsable] variables:\n"
" - activity: the campaign activity\n"
" - workitem: the campaign workitem\n"
" - resource: the resource object this campaign item represents\n"
" - transitions: list of campaign transitions outgoing from this activity\n"
"...- re: Python regular expression module"),
'type': fields.selection(_action_types, 'Type', required=True,
help="""The type of action to execute when an item enters this activity, such as:
- Email: send an email using a predefined email template
- Report: print an existing Report defined on the resource item and save it into a specific directory
- Custom Action: execute a predefined action, e.g. to modify the fields of the resource record
"""),
'email_template_id': fields.many2one('email.template', "Email Template", help='The email to send when this activity is activated'),
'report_id': fields.many2one('ir.actions.report.xml', "Report", help='The report to generate when this activity is activated', ),
'report_directory_id': fields.many2one('document.directory','Directory',
help="This folder is used to store the generated reports"),
'server_action_id': fields.many2one('ir.actions.server', string='Action',
help= "The action to perform when this activity is activated"),
'to_ids': fields.one2many('marketing.campaign.transition',
'activity_from_id',
'Next Activities'),
'from_ids': fields.one2many('marketing.campaign.transition',
'activity_to_id',
'Previous Activities'),
'variable_cost': fields.float('Variable Cost', help="Set a variable cost if you consider that every campaign item that has reached this point has entailed a certain cost. You can get cost statistics in the Reporting section", digits_compute=dp.get_precision('Product Price')),
'revenue': fields.float('Revenue', help="Set an expected revenue if you consider that every campaign item that has reached this point has generated a certain revenue. You can get revenue statistics in the Reporting section", digits_compute=dp.get_precision('Account')),
'signal': fields.char('Signal',
help='An activity with a signal can be called programmatically. Be careful, the workitem is always created when a signal is sent'),
'keep_if_condition_not_met': fields.boolean("Don't Delete Workitems",
help="By activating this option, workitems that aren't executed because the condition is not met are marked as cancelled instead of being deleted.")
}
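    # Illustrative only (not from the original module): a 'condition' expression
    # written against the variables described in the help text above might look
    # like the following, assuming the campaign resource is a res.partner-style
    # record exposing an 'email' field:
    #
    #     resource.email and re.match(r".+@example\.com$", resource.email)
    #
    # The expression is evaluated with eval() in _process_one(), so it only
    # needs to produce a truthy or falsy result.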
_defaults = {
'type': lambda *a: 'email',
'condition': lambda *a: 'True',
}
def search(self, cr, uid, args, offset=0, limit=None, order=None,
context=None, count=False):
        if context is None:
context = {}
if 'segment_id' in context and context['segment_id']:
segment_obj = self.pool.get('marketing.campaign.segment').browse(cr,
uid, context['segment_id'])
act_ids = []
for activity in segment_obj.campaign_id.activity_ids:
act_ids.append(activity.id)
return act_ids
return super(marketing_campaign_activity, self).search(cr, uid, args,
offset, limit, order, context, count)
#dead code
def _process_wi_report(self, cr, uid, activity, workitem, context=None):
report_data, format = render_report(cr, uid, [], activity.report_id.report_name, {}, context=context)
attach_vals = {
'name': '%s_%s_%s'%(activity.report_id.report_name,
activity.name,workitem.partner_id.name),
'datas_fname': '%s.%s'%(activity.report_id.report_name,
activity.report_id.report_type),
'parent_id': activity.report_directory_id.id,
'datas': base64.encodestring(report_data),
'file_type': format
}
self.pool.get('ir.attachment').create(cr, uid, attach_vals)
return True
def _process_wi_email(self, cr, uid, activity, workitem, context=None):
return self.pool.get('email.template').send_mail(cr, uid,
activity.email_template_id.id,
workitem.res_id, context=context)
#dead code
def _process_wi_action(self, cr, uid, activity, workitem, context=None):
if context is None:
context = {}
server_obj = self.pool.get('ir.actions.server')
action_context = dict(context,
active_id=workitem.res_id,
active_ids=[workitem.res_id],
active_model=workitem.object_id.model,
workitem=workitem)
server_obj.run(cr, uid, [activity.server_action_id.id],
context=action_context)
return True
def process(self, cr, uid, act_id, wi_id, context=None):
activity = self.browse(cr, uid, act_id, context=context)
method = '_process_wi_%s' % (activity.type,)
action = getattr(self, method, None)
if not action:
raise NotImplementedError('Method %r is not implemented on %r object.' % (method, self))
workitem_obj = self.pool.get('marketing.campaign.workitem')
workitem = workitem_obj.browse(cr, uid, wi_id, context=context)
return action(cr, uid, activity, workitem, context=context)
class marketing_campaign_transition(osv.osv):
_name = "marketing.campaign.transition"
_description = "Campaign Transition"
_interval_units = [
('hours', 'Hour(s)'),
('days', 'Day(s)'),
('months', 'Month(s)'),
('years', 'Year(s)'),
]
def _get_name(self, cr, uid, ids, fn, args, context=None):
# name formatters that depend on trigger
formatters = {
'auto': _('Automatic transition'),
'time': _('After %(interval_nbr)d %(interval_type)s'),
'cosmetic': _('Cosmetic'),
}
# get the translations of the values of selection field 'interval_type'
fields = self.fields_get(cr, uid, ['interval_type'], context=context)
interval_type_selection = dict(fields['interval_type']['selection'])
result = dict.fromkeys(ids, False)
for trans in self.browse(cr, uid, ids, context=context):
values = {
'interval_nbr': trans.interval_nbr,
'interval_type': interval_type_selection.get(trans.interval_type, ''),
}
result[trans.id] = formatters[trans.trigger] % values
return result
def _delta(self, cr, uid, ids, context=None):
assert len(ids) == 1
transition = self.browse(cr, uid, ids[0], context=context)
if transition.trigger != 'time':
raise ValueError('Delta is only relevant for timed transition.')
return relativedelta(**{str(transition.interval_type): transition.interval_nbr})
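    # Worked example (illustrative): for a transition with trigger 'time',
    # interval_type 'days' and interval_nbr 3, _delta() returns
    # relativedelta(days=3); _process_one() adds this to the workitem date to
    # schedule the follow-up workitem.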
_columns = {
'name': fields.function(_get_name, string='Name',
type='char', size=128),
'activity_from_id': fields.many2one('marketing.campaign.activity',
'Previous Activity', select=1,
required=True, ondelete="cascade"),
'activity_to_id': fields.many2one('marketing.campaign.activity',
'Next Activity',
required=True, ondelete="cascade"),
'interval_nbr': fields.integer('Interval Value', required=True),
'interval_type': fields.selection(_interval_units, 'Interval Unit',
required=True),
'trigger': fields.selection([('auto', 'Automatic'),
('time', 'Time'),
('cosmetic', 'Cosmetic'), # fake plastic transition
],
'Trigger', required=True,
help="How is the destination workitem triggered"),
}
_defaults = {
'interval_nbr': 1,
'interval_type': 'days',
'trigger': 'time',
}
def _check_campaign(self, cr, uid, ids, context=None):
for obj in self.browse(cr, uid, ids, context=context):
if obj.activity_from_id.campaign_id != obj.activity_to_id.campaign_id:
return False
return True
_constraints = [
(_check_campaign, 'The To/From Activity of transition must be of the same Campaign ', ['activity_from_id,activity_to_id']),
]
_sql_constraints = [
('interval_positive', 'CHECK(interval_nbr >= 0)', 'The interval must be positive or zero')
]
class marketing_campaign_workitem(osv.osv):
_name = "marketing.campaign.workitem"
_description = "Campaign Workitem"
def _res_name_get(self, cr, uid, ids, field_name, arg, context=None):
res = dict.fromkeys(ids, '/')
for wi in self.browse(cr, uid, ids, context=context):
if not wi.res_id:
continue
proxy = self.pool[wi.object_id.model]
if not proxy.exists(cr, uid, [wi.res_id]):
continue
ng = proxy.name_get(cr, uid, [wi.res_id], context=context)
if ng:
res[wi.id] = ng[0][1]
return res
def _resource_search(self, cr, uid, obj, name, args, domain=None, context=None):
"""Returns id of workitem whose resource_name matches with the given name"""
if not len(args):
return []
condition_name = None
for domain_item in args:
# we only use the first domain criterion and ignore all the rest including operators
if isinstance(domain_item, (list,tuple)) and len(domain_item) == 3 and domain_item[0] == 'res_name':
condition_name = [None, domain_item[1], domain_item[2]]
break
assert condition_name, "Invalid search domain for marketing_campaign_workitem.res_name. It should use 'res_name'"
cr.execute("""select w.id, w.res_id, m.model \
from marketing_campaign_workitem w \
left join marketing_campaign_activity a on (a.id=w.activity_id)\
left join marketing_campaign c on (c.id=a.campaign_id)\
left join ir_model m on (m.id=c.object_id)
""")
res = cr.fetchall()
workitem_map = {}
matching_workitems = []
for id, res_id, model in res:
workitem_map.setdefault(model,{}).setdefault(res_id,set()).add(id)
for model, id_map in workitem_map.iteritems():
model_pool = self.pool[model]
condition_name[0] = model_pool._rec_name
condition = [('id', 'in', id_map.keys()), condition_name]
for res_id in model_pool.search(cr, uid, condition, context=context):
matching_workitems.extend(id_map[res_id])
return [('id', 'in', list(set(matching_workitems)))]
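    # Illustrative search (hypothetical value): a domain such as
    # [('res_name', 'ilike', 'Acme')] is routed through _resource_search above,
    # which matches each workitem's underlying resource by its display name and
    # returns an [('id', 'in', [...])] domain of the matching workitems.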
_columns = {
'segment_id': fields.many2one('marketing.campaign.segment', 'Segment', readonly=True),
'activity_id': fields.many2one('marketing.campaign.activity','Activity',
required=True, readonly=True),
'campaign_id': fields.related('activity_id', 'campaign_id',
type='many2one', relation='marketing.campaign', string='Campaign', readonly=True, store=True),
'object_id': fields.related('activity_id', 'campaign_id', 'object_id',
type='many2one', relation='ir.model', string='Resource', select=1, readonly=True, store=True),
'res_id': fields.integer('Resource ID', select=1, readonly=True),
'res_name': fields.function(_res_name_get, string='Resource Name', fnct_search=_resource_search, type="char", size=64),
'date': fields.datetime('Execution Date', help='If date is not set, this workitem has to be run manually', readonly=True),
'partner_id': fields.many2one('res.partner', 'Partner', select=1, readonly=True),
'state': fields.selection([ ('todo', 'To Do'),
('cancelled', 'Cancelled'),
('exception', 'Exception'),
('done', 'Done'),
], 'Status', readonly=True, copy=False),
'error_msg' : fields.text('Error Message', readonly=True)
}
_defaults = {
'state': lambda *a: 'todo',
'date': False,
}
@api.cr_uid_ids_context
def button_draft(self, cr, uid, workitem_ids, context=None):
for wi in self.browse(cr, uid, workitem_ids, context=context):
if wi.state in ('exception', 'cancelled'):
self.write(cr, uid, [wi.id], {'state':'todo'}, context=context)
return True
@api.cr_uid_ids_context
def button_cancel(self, cr, uid, workitem_ids, context=None):
for wi in self.browse(cr, uid, workitem_ids, context=context):
if wi.state in ('todo','exception'):
self.write(cr, uid, [wi.id], {'state':'cancelled'}, context=context)
return True
def _process_one(self, cr, uid, workitem, context=None):
if workitem.state != 'todo':
return False
activity = workitem.activity_id
proxy = self.pool[workitem.object_id.model]
object_id = proxy.browse(cr, uid, workitem.res_id, context=context)
eval_context = {
'activity': activity,
'workitem': workitem,
'object': object_id,
'resource': object_id,
'transitions': activity.to_ids,
're': re,
}
try:
condition = activity.condition
campaign_mode = workitem.campaign_id.mode
if condition:
if not eval(condition, eval_context):
if activity.keep_if_condition_not_met:
workitem.write({'state': 'cancelled'})
else:
workitem.unlink()
return
result = True
if campaign_mode in ('manual', 'active'):
Activities = self.pool.get('marketing.campaign.activity')
result = Activities.process(cr, uid, activity.id, workitem.id,
context=context)
values = dict(state='done')
if not workitem.date:
values['date'] = datetime.now().strftime(DT_FMT)
workitem.write(values)
if result:
# process _chain
workitem.refresh() # reload
date = datetime.strptime(workitem.date, DT_FMT)
for transition in activity.to_ids:
if transition.trigger == 'cosmetic':
continue
launch_date = False
if transition.trigger == 'auto':
launch_date = date
elif transition.trigger == 'time':
launch_date = date + transition._delta()
if launch_date:
launch_date = launch_date.strftime(DT_FMT)
values = {
'date': launch_date,
'segment_id': workitem.segment_id.id,
'activity_id': transition.activity_to_id.id,
'partner_id': workitem.partner_id.id,
'res_id': workitem.res_id,
'state': 'todo',
}
wi_id = self.create(cr, uid, values, context=context)
# Now, depending on the trigger and the campaign mode
# we know whether we must run the newly created workitem.
#
# rows = transition trigger \ colums = campaign mode
#
# test test_realtime manual normal (active)
# time Y N N N
# cosmetic N N N N
# auto Y Y N Y
#
run = (transition.trigger == 'auto' \
and campaign_mode != 'manual') \
or (transition.trigger == 'time' \
and campaign_mode == 'test')
if run:
new_wi = self.browse(cr, uid, wi_id, context)
self._process_one(cr, uid, new_wi, context)
except Exception:
tb = "".join(format_exception(*exc_info()))
workitem.write({'state': 'exception', 'error_msg': tb})
@api.cr_uid_ids_context
def process(self, cr, uid, workitem_ids, context=None):
for wi in self.browse(cr, uid, workitem_ids, context=context):
self._process_one(cr, uid, wi, context=context)
return True
def process_all(self, cr, uid, camp_ids=None, context=None):
camp_obj = self.pool.get('marketing.campaign')
if camp_ids is None:
camp_ids = camp_obj.search(cr, uid, [('state','=','running')], context=context)
for camp in camp_obj.browse(cr, uid, camp_ids, context=context):
if camp.mode == 'manual':
# manual states are not processed automatically
continue
while True:
domain = [('campaign_id', '=', camp.id), ('state', '=', 'todo'), ('date', '!=', False)]
if camp.mode in ('test_realtime', 'active'):
domain += [('date','<=', time.strftime('%Y-%m-%d %H:%M:%S'))]
workitem_ids = self.search(cr, uid, domain, context=context)
if not workitem_ids:
break
self.process(cr, uid, workitem_ids, context=context)
return True
def preview(self, cr, uid, ids, context=None):
res = {}
wi_obj = self.browse(cr, uid, ids[0], context=context)
if wi_obj.activity_id.type == 'email':
view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'email_template', 'email_template_preview_form')
res = {
'name': _('Email Preview'),
'view_type': 'form',
'view_mode': 'form,tree',
'res_model': 'email_template.preview',
'view_id': False,
'views': [(view_id and view_id[1] or 0, 'form')],
'type': 'ir.actions.act_window',
'target': 'new',
'nodestroy':True,
'context': "{'template_id':%d,'default_res_id':%d}"%
(wi_obj.activity_id.email_template_id.id,
wi_obj.res_id)
}
elif wi_obj.activity_id.type == 'report':
datas = {
'ids': [wi_obj.res_id],
'model': wi_obj.object_id.model
}
res = {
'type' : 'ir.actions.report.xml',
'report_name': wi_obj.activity_id.report_id.report_name,
'datas' : datas,
}
else:
raise osv.except_osv(_('No preview'),_('The current step for this item has no email or report to preview.'))
return res
class email_template(osv.osv):
_inherit = "email.template"
_defaults = {
'model_id': lambda obj, cr, uid, context: context.get('object_id',False),
}
# TODO: add constraint to prevent disabling / disapproving an email account used in a running campaign
class report_xml(osv.osv):
_inherit = 'ir.actions.report.xml'
def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
if context is None:
context = {}
object_id = context.get('object_id')
if object_id:
model = self.pool.get('ir.model').browse(cr, uid, object_id, context=context).model
args.append(('model', '=', model))
return super(report_xml, self).search(cr, uid, args, offset=offset, limit=limit, order=order, context=context, count=count)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
gandalfcode/gandalf | examples/example09.py | 1 | 1749 | #==============================================================================
# example09.py
# Create initial conditions for pure N-body simulation inside the python
# script, and then run the simulation to completion while plotting results.
#==============================================================================
from gandalf.analysis.facade import *
import numpy as np
import time
# Create empty numpy arrays for setting star initial conditions
Nstar = 3
x = np.zeros(Nstar)
y = np.zeros(Nstar)
vx = np.zeros(Nstar)
vy = np.zeros(Nstar)
m = np.zeros(Nstar)
h = 0.000001*np.ones(Nstar)
# Set values for each star individually (Note all velocities initially zero)
m[0] = 3.0; x[0] = 1.0; y[0] = 3.0
m[1] = 4.0; x[1] = -2.0; y[1] = -1.0
m[2] = 5.0; x[2] = 1.0; y[2] = -1.0
# Create new 2D simulation object and set parameters
sim = newsim(ndim=2,sim='nbody')
sim.SetParam('ic','python')
sim.SetParam('nbody','hermite4ts')
sim.SetParam('sub_systems',0)
sim.SetParam('Npec',3)
sim.SetParam('Nlevels',1)
sim.SetParam('Nstar',Nstar)
sim.SetParam('tend',80.0)
sim.SetParam('dt_snap',1.0)
sim.SetParam('noutputstep',128)
sim.SetParam('ndiagstep',2048)
sim.SetParam('dimensionless',1)
sim.SetParam('run_id','BURRAU1')
sim.SetParam('out_file_form','su')
# Call setup routines and import particle data
sim.PreSetupForPython()
sim.ImportArray(x,'x','star')
sim.ImportArray(y,'y','star')
sim.ImportArray(vx,'vx','star')
sim.ImportArray(vy,'vy','star')
sim.ImportArray(m,'m','star')
sim.ImportArray(h,'h','star')
sim.SetupSimulation()
# Plot the positions of the stars
plot("x","y",type="star")
limit("x",-30.0,30.0,window="all")
limit("y",-20.0,40.0,window="all")
# Run simulation and save plot to file
run()
block()
| gpl-2.0 |
swdream/neutron | neutron/tests/unit/api/v2/test_resource.py | 28 | 14954 | # Copyright (c) 2012 Intel Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import oslo_i18n
from webob import exc
import webtest
from neutron.api.v2 import resource as wsgi_resource
from neutron.common import exceptions as n_exc
from neutron import context
from neutron.tests import base
from neutron import wsgi
class RequestTestCase(base.BaseTestCase):
def setUp(self):
super(RequestTestCase, self).setUp()
self.req = wsgi_resource.Request({'foo': 'bar'})
def test_content_type_missing(self):
request = wsgi.Request.blank('/tests/123', method='POST')
request.body = b"<body />"
self.assertIsNone(request.get_content_type())
def test_content_type_with_charset(self):
request = wsgi.Request.blank('/tests/123')
request.headers["Content-Type"] = "application/json; charset=UTF-8"
result = request.get_content_type()
self.assertEqual(result, "application/json")
def test_content_type_from_accept(self):
content_type = 'application/json'
request = wsgi.Request.blank('/tests/123')
request.headers["Accept"] = content_type
result = request.best_match_content_type()
self.assertEqual(result, content_type)
def test_content_type_from_accept_best(self):
request = wsgi.Request.blank('/tests/123')
request.headers["Accept"] = "application/json"
result = request.best_match_content_type()
self.assertEqual(result, "application/json")
request = wsgi.Request.blank('/tests/123')
request.headers["Accept"] = ("application/json; q=0.3, "
"application/xml; q=0.9")
result = request.best_match_content_type()
self.assertEqual(result, "application/json")
def test_content_type_from_query_extension(self):
request = wsgi.Request.blank('/tests/123.json')
result = request.best_match_content_type()
self.assertEqual(result, "application/json")
request = wsgi.Request.blank('/tests/123.invalid')
result = request.best_match_content_type()
self.assertEqual(result, "application/json")
def test_content_type_accept_and_query_extension(self):
request = wsgi.Request.blank('/tests/123.json')
request.headers["Accept"] = "application/xml"
result = request.best_match_content_type()
self.assertEqual(result, "application/json")
def test_content_type_accept_default(self):
request = wsgi.Request.blank('/tests/123.unsupported')
request.headers["Accept"] = "application/unsupported1"
result = request.best_match_content_type()
self.assertEqual(result, "application/json")
def test_context_with_neutron_context(self):
ctxt = context.Context('fake_user', 'fake_tenant')
self.req.environ['neutron.context'] = ctxt
self.assertEqual(self.req.context, ctxt)
def test_context_without_neutron_context(self):
self.assertTrue(self.req.context.is_admin)
def test_request_context_elevated(self):
user_context = context.Context(
'fake_user', 'fake_project', admin=False)
self.assertFalse(user_context.is_admin)
admin_context = user_context.elevated()
self.assertFalse(user_context.is_admin)
self.assertTrue(admin_context.is_admin)
self.assertNotIn('admin', user_context.roles)
self.assertIn('admin', admin_context.roles)
def test_best_match_language(self):
        # Test that we are actually invoking language negotiation by webob
request = wsgi.Request.blank('/')
oslo_i18n.get_available_languages = mock.MagicMock()
oslo_i18n.get_available_languages.return_value = ['known-language',
'es', 'zh']
request.headers['Accept-Language'] = 'known-language'
language = request.best_match_language()
self.assertEqual(language, 'known-language')
        # If the Accept-Language header is an unknown language, missing or empty,
# the best match locale should be None
request.headers['Accept-Language'] = 'unknown-language'
language = request.best_match_language()
self.assertIsNone(language)
request.headers['Accept-Language'] = ''
language = request.best_match_language()
self.assertIsNone(language)
request.headers.pop('Accept-Language')
language = request.best_match_language()
self.assertIsNone(language)
class ResourceTestCase(base.BaseTestCase):
@staticmethod
def _get_deserializer():
return wsgi.JSONDeserializer()
def test_unmapped_neutron_error_with_json(self):
msg = u'\u7f51\u7edc'
class TestException(n_exc.NeutronException):
message = msg
expected_res = {'body': {
'NeutronError': {
'type': 'TestException',
'message': msg,
'detail': ''}}}
controller = mock.MagicMock()
controller.test.side_effect = TestException()
resource = webtest.TestApp(wsgi_resource.Resource(controller))
environ = {'wsgiorg.routing_args': (None, {'action': 'test',
'format': 'json'})}
res = resource.get('', extra_environ=environ, expect_errors=True)
self.assertEqual(res.status_int, exc.HTTPInternalServerError.code)
self.assertEqual(wsgi.JSONDeserializer().deserialize(res.body),
expected_res)
@mock.patch('oslo_i18n.translate')
def test_unmapped_neutron_error_localized(self, mock_translation):
msg_translation = 'Translated error'
mock_translation.return_value = msg_translation
msg = _('Unmapped error')
class TestException(n_exc.NeutronException):
message = msg
controller = mock.MagicMock()
controller.test.side_effect = TestException()
resource = webtest.TestApp(wsgi_resource.Resource(controller))
environ = {'wsgiorg.routing_args': (None, {'action': 'test',
'format': 'json'})}
res = resource.get('', extra_environ=environ, expect_errors=True)
self.assertEqual(res.status_int, exc.HTTPInternalServerError.code)
self.assertIn(msg_translation,
str(wsgi.JSONDeserializer().deserialize(res.body)))
def test_mapped_neutron_error_with_json(self):
msg = u'\u7f51\u7edc'
class TestException(n_exc.NeutronException):
message = msg
expected_res = {'body': {
'NeutronError': {
'type': 'TestException',
'message': msg,
'detail': ''}}}
controller = mock.MagicMock()
controller.test.side_effect = TestException()
faults = {TestException: exc.HTTPGatewayTimeout}
resource = webtest.TestApp(wsgi_resource.Resource(controller,
faults=faults))
environ = {'wsgiorg.routing_args': (None, {'action': 'test',
'format': 'json'})}
res = resource.get('', extra_environ=environ, expect_errors=True)
self.assertEqual(res.status_int, exc.HTTPGatewayTimeout.code)
self.assertEqual(wsgi.JSONDeserializer().deserialize(res.body),
expected_res)
@mock.patch('oslo_i18n.translate')
def test_mapped_neutron_error_localized(self, mock_translation):
msg_translation = 'Translated error'
mock_translation.return_value = msg_translation
msg = _('Unmapped error')
class TestException(n_exc.NeutronException):
message = msg
controller = mock.MagicMock()
controller.test.side_effect = TestException()
faults = {TestException: exc.HTTPGatewayTimeout}
resource = webtest.TestApp(wsgi_resource.Resource(controller,
faults=faults))
environ = {'wsgiorg.routing_args': (None, {'action': 'test',
'format': 'json'})}
res = resource.get('', extra_environ=environ, expect_errors=True)
self.assertEqual(res.status_int, exc.HTTPGatewayTimeout.code)
self.assertIn(msg_translation,
str(wsgi.JSONDeserializer().deserialize(res.body)))
@staticmethod
def _make_request_with_side_effect(side_effect):
controller = mock.MagicMock()
controller.test.side_effect = side_effect
resource = webtest.TestApp(wsgi_resource.Resource(controller))
routing_args = {'action': 'test'}
environ = {'wsgiorg.routing_args': (None, routing_args)}
res = resource.get('', extra_environ=environ, expect_errors=True)
return res
def test_http_error(self):
res = self._make_request_with_side_effect(exc.HTTPGatewayTimeout())
# verify that the exception structure is the one expected
# by the python-neutronclient
self.assertEqual(exc.HTTPGatewayTimeout().explanation,
res.json['NeutronError']['message'])
self.assertEqual('HTTPGatewayTimeout',
res.json['NeutronError']['type'])
self.assertEqual('', res.json['NeutronError']['detail'])
self.assertEqual(exc.HTTPGatewayTimeout.code, res.status_int)
def test_unhandled_error(self):
expected_res = {'body': {'NeutronError':
{'detail': '',
'message': _(
'Request Failed: internal server '
'error while processing your request.'),
'type': 'HTTPInternalServerError'}}}
res = self._make_request_with_side_effect(side_effect=Exception())
self.assertEqual(exc.HTTPInternalServerError.code,
res.status_int)
self.assertEqual(expected_res,
self._get_deserializer().deserialize(res.body))
def test_not_implemented_error(self):
expected_res = {'body': {'NeutronError':
{'detail': '',
'message': _(
'The server has either erred or is '
'incapable of performing the requested '
'operation.'),
'type': 'HTTPNotImplemented'}}}
res = self._make_request_with_side_effect(exc.HTTPNotImplemented())
self.assertEqual(exc.HTTPNotImplemented.code, res.status_int)
self.assertEqual(expected_res,
self._get_deserializer().deserialize(res.body))
def test_status_200(self):
controller = mock.MagicMock()
controller.test = lambda request: {'foo': 'bar'}
resource = webtest.TestApp(wsgi_resource.Resource(controller))
environ = {'wsgiorg.routing_args': (None, {'action': 'test'})}
res = resource.get('', extra_environ=environ)
self.assertEqual(res.status_int, 200)
def test_status_204(self):
controller = mock.MagicMock()
controller.test = lambda request: {'foo': 'bar'}
resource = webtest.TestApp(wsgi_resource.Resource(controller))
environ = {'wsgiorg.routing_args': (None, {'action': 'delete'})}
res = resource.delete('', extra_environ=environ)
self.assertEqual(res.status_int, 204)
def _test_error_log_level(self, expected_webob_exc, expect_log_info=False,
use_fault_map=True, exc_raised=None):
if not exc_raised:
class TestException(n_exc.NeutronException):
message = 'Test Exception'
exc_raised = TestException
controller = mock.MagicMock()
controller.test.side_effect = exc_raised()
faults = {exc_raised: expected_webob_exc} if use_fault_map else {}
resource = webtest.TestApp(wsgi_resource.Resource(controller, faults))
environ = {'wsgiorg.routing_args': (None, {'action': 'test'})}
with mock.patch.object(wsgi_resource, 'LOG') as log:
res = resource.get('', extra_environ=environ, expect_errors=True)
self.assertEqual(res.status_int, expected_webob_exc.code)
self.assertEqual(expect_log_info, log.info.called)
self.assertNotEqual(expect_log_info, log.exception.called)
def test_4xx_error_logged_info_level(self):
self._test_error_log_level(exc.HTTPNotFound, expect_log_info=True)
def test_non_4xx_error_logged_exception_level(self):
self._test_error_log_level(exc.HTTPServiceUnavailable,
expect_log_info=False)
def test_unmapped_error_logged_exception_level(self):
self._test_error_log_level(exc.HTTPInternalServerError,
expect_log_info=False, use_fault_map=False)
def test_webob_4xx_logged_info_level(self):
self._test_error_log_level(exc.HTTPNotFound,
use_fault_map=False, expect_log_info=True,
exc_raised=exc.HTTPNotFound)
def test_webob_5xx_logged_info_level(self):
self._test_error_log_level(exc.HTTPServiceUnavailable,
use_fault_map=False, expect_log_info=False,
exc_raised=exc.HTTPServiceUnavailable)
def test_no_route_args(self):
controller = mock.MagicMock()
resource = webtest.TestApp(wsgi_resource.Resource(controller))
environ = {}
res = resource.get('', extra_environ=environ, expect_errors=True)
self.assertEqual(res.status_int, exc.HTTPInternalServerError.code)
def test_post_with_body(self):
controller = mock.MagicMock()
controller.test = lambda request, body: {'foo': 'bar'}
resource = webtest.TestApp(wsgi_resource.Resource(controller))
environ = {'wsgiorg.routing_args': (None, {'action': 'test'})}
res = resource.post('', params='{"key": "val"}',
extra_environ=environ)
self.assertEqual(res.status_int, 200)
| apache-2.0 |
gunzy83/ansible-modules-extras | monitoring/datadog_event.py | 33 | 5325 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Author: Artūras 'arturaz' Šlajus <[email protected]>
# Author: Naoya Nakazawa <[email protected]>
#
# This module is proudly sponsored by iGeolise (www.igeolise.com) and
# Tiny Lab Productions (www.tinylabproductions.com).
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Import Datadog
try:
from datadog import initialize, api
HAS_DATADOG = True
except:
HAS_DATADOG = False
DOCUMENTATION = '''
---
module: datadog_event
short_description: Posts events to DataDog service
description:
- "Allows to post events to DataDog (www.datadoghq.com) service."
- "Uses http://docs.datadoghq.com/api/#events API."
version_added: "1.3"
author:
- "Artūras `arturaz` Šlajus (@arturaz)"
- "Naoya Nakazawa (@n0ts)"
notes: []
requirements: []
options:
api_key:
description: ["Your DataDog API key."]
required: true
default: null
app_key:
description: ["Your DataDog app key."]
required: true
version_added: "2.2"
title:
description: ["The event title."]
required: true
default: null
text:
description: ["The body of the event."]
required: true
default: null
date_happened:
description:
- POSIX timestamp of the event.
- Default value is now.
required: false
default: now
priority:
description: ["The priority of the event."]
required: false
default: normal
choices: [normal, low]
tags:
description: ["Comma separated list of tags to apply to the event."]
required: false
default: null
alert_type:
description: ["Type of alert."]
required: false
default: info
choices: ['error', 'warning', 'info', 'success']
aggregation_key:
description: ["An arbitrary string to use for aggregation."]
required: false
default: null
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: 1.5.1
'''
EXAMPLES = '''
# Post an event with low priority
datadog_event: title="Testing from ansible" text="Test!" priority="low"
api_key: "9775a026f1ca7d1c6c5af9d94d9595a4"
app_key: "j4JyCYfefWHhgFgiZUqRm63AXHNZQyPGBfJtAzmN"
# Post an event with several tags
datadog_event: title="Testing from ansible" text="Test!"
api_key: "9775a026f1ca7d1c6c5af9d94d9595a4"
app_key: "j4JyCYfefWHhgFgiZUqRm63AXHNZQyPGBfJtAzmN"
tags=aa,bb,#host:{{ inventory_hostname }}
'''
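# Additional illustrative task (not taken from the module documentation; the
# Jinja2 variables are placeholders):
#
#   - datadog_event:
#       api_key: "{{ datadog_api_key }}"
#       app_key: "{{ datadog_app_key }}"
#       title: "Deployment finished"
#       text: "Version {{ app_version }} deployed to {{ inventory_hostname }}"
#       alert_type: success
#       aggregation_key: "deploy-{{ inventory_hostname }}"
#       tags:
#         - deploy
#         - "env:production"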
# Import Datadog
def main():
module = AnsibleModule(
argument_spec=dict(
api_key=dict(required=True, no_log=True),
app_key=dict(required=True, no_log=True),
title=dict(required=True),
text=dict(required=True),
date_happened=dict(required=False, default=None, type='int'),
priority=dict(
required=False, default='normal', choices=['normal', 'low']
),
tags=dict(required=False, default=None, type='list'),
alert_type=dict(
required=False, default='info',
choices=['error', 'warning', 'info', 'success']
),
aggregation_key=dict(required=False, default=None),
validate_certs = dict(default='yes', type='bool'),
)
)
# Prepare Datadog
if not HAS_DATADOG:
module.fail_json(msg='datadogpy required for this module')
options = {
'api_key': module.params['api_key'],
'app_key': module.params['app_key']
}
initialize(**options)
_post_event(module)
def _post_event(module):
try:
msg = api.Event.create(title=module.params['title'],
text=module.params['text'],
tags=module.params['tags'],
priority=module.params['priority'],
alert_type=module.params['alert_type'],
aggregation_key=module.params['aggregation_key'],
source_type_name='ansible')
if msg['status'] != 'ok':
module.fail_json(msg=msg)
module.exit_json(changed=True, msg=msg)
except Exception:
e = get_exception()
module.fail_json(msg=str(e))
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
| gpl-3.0 |
ciudadanointeligente/votainteligente-portal-electoral | popular_proposal/rest_api.py | 2 | 1890 | from rest_framework.serializers import (HyperlinkedModelSerializer,
JSONField,
StringRelatedField)
from rest_framework.viewsets import ReadOnlyModelViewSet
from popular_proposal.models import PopularProposal, Commitment
from elections.models import Candidate
class ProposalSerializer(HyperlinkedModelSerializer):
data = JSONField()
proposer = StringRelatedField()
class Meta:
model = PopularProposal
fields = ('id','title', 'slug', 'get_absolute_url', 'data', 'proposer','created', 'clasification','is_local_meeting','nro_supports')
class ProposalViewSet(ReadOnlyModelViewSet):
queryset = PopularProposal.objects.all()
serializer_class = ProposalSerializer
def get_queryset(self):
queryset = super(ProposalViewSet, self).get_queryset()
username = self.request.query_params.get('proposer', None)
if username is not None:
queryset = queryset.filter(proposer__username=username)
clasification = self.request.query_params.get('clasification', None)
if clasification is not None:
queryset = queryset.filter(clasification=clasification)
return queryset
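# Usage sketch (illustrative; the URL prefix depends on how the project's router
# registers ProposalViewSet, which happens outside this file):
#
#   GET /popular_proposals/?proposer=alice&clasification=education
#
# returns only proposals created by user 'alice' with the 'education'
# clasification (the field name is spelled that way on the model), per the
# query-parameter filtering in get_queryset() above.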
class CommitmentsSerializer(HyperlinkedModelSerializer):
class Meta:
model = Commitment
fields = ('id','proposal','candidate', 'detail', 'commited', 'get_absolute_url')
class CommitmentViewSet(ReadOnlyModelViewSet):
queryset = Commitment.objects.all()
serializer_class = CommitmentsSerializer
class CandidateSerializer(HyperlinkedModelSerializer):
class Meta:
model = Candidate
fields = ('id','name', 'get_absolute_url', 'commitments')
class CandidateViewSet(ReadOnlyModelViewSet):
queryset = Candidate.objects.all()
serializer_class = CandidateSerializer
pagination_class = None | gpl-3.0 |
ycl2045/nova-master | nova/scheduler/filters/isolated_hosts_filter.py | 13 | 3321 | # Copyright (c) 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from nova.scheduler import filters
isolated_opts = [
cfg.ListOpt('isolated_images',
default=[],
help='Images to run on isolated host'),
cfg.ListOpt('isolated_hosts',
default=[],
help='Host reserved for specific images'),
cfg.BoolOpt('restrict_isolated_hosts_to_isolated_images',
default=True,
help='Whether to force isolated hosts to run only isolated '
'images'),
]
CONF = cfg.CONF
CONF.register_opts(isolated_opts)
class IsolatedHostsFilter(filters.BaseHostFilter):
"""Keep specified images to selected hosts."""
# The configuration values do not change within a request
run_filter_once_per_request = True
def host_passes(self, host_state, filter_properties):
"""Result Matrix with 'restrict_isolated_hosts_to_isolated_images' set
to True:
| isolated_image | non_isolated_image
-------------+----------------+-------------------
iso_host | True | False
non_iso_host | False | True
Result Matrix with 'restrict_isolated_hosts_to_isolated_images' set
to False:
| isolated_image | non_isolated_image
-------------+----------------+-------------------
iso_host | True | True
non_iso_host | False | True
"""
# If the configuration does not list any hosts, the filter will always
# return True, assuming a configuration error, so letting all hosts
# through.
isolated_hosts = CONF.isolated_hosts
isolated_images = CONF.isolated_images
restrict_isolated_hosts_to_isolated_images = (CONF.
restrict_isolated_hosts_to_isolated_images)
if not isolated_images:
# As there are no images to match, return True if the filter is
# not restrictive otherwise return False if the host is in the
# isolation list.
return ((not restrict_isolated_hosts_to_isolated_images) or
(host_state.host not in isolated_hosts))
spec = filter_properties.get('request_spec', {})
props = spec.get('instance_properties', {})
image_ref = props.get('image_ref')
image_isolated = image_ref in isolated_images
host_isolated = host_state.host in isolated_hosts
if restrict_isolated_hosts_to_isolated_images:
return (image_isolated == host_isolated)
else:
return (not image_isolated) or host_isolated
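# Illustrative nova.conf snippet (hypothetical image UUID and host names)
# showing how the three options registered above drive this filter:
#
#   [DEFAULT]
#   isolated_images = 11111111-2222-3333-4444-555555555555
#   isolated_hosts = compute-iso-01,compute-iso-02
#   restrict_isolated_hosts_to_isolated_images = True
#
# With these settings, instances booted from the listed image may only land on
# the two isolated hosts, and those hosts accept no other images.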
| apache-2.0 |
EricSB/nupic | tests/unit/nupic/research/spatial_pooler_boost_test.py | 15 | 11879 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import time
import numpy
import unittest2 as unittest
from nupic.support.unittesthelpers.algorithm_test_helpers \
import CreateSP
from nupic.bindings.math import GetNTAReal
uintType = "uint32"
# set a single seed for running both implementations
SEED = int((time.time()%10000)*10)
def _computeOverlap(x, y):
"""
Given two binary arrays, compute their overlap. The overlap is the number
of bits where x[i] and y[i] are both 1
"""
return ((x + y) == 2).sum()
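# For example (illustrative): with x = [1, 1, 0, 1] and y = [1, 0, 0, 1],
# _computeOverlap(x, y) returns 2, since positions 0 and 3 are on in both.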
def _areAllSDRsUnique(sdrDict):
"""Return True iff all the SDR's in the dict are unique."""
for k1, v1 in sdrDict.iteritems():
for k2, v2 in sdrDict.iteritems():
# Return false if two different keys have identical SDR's
if (k2 != k1) and ((v1 == v2).sum() == v1.size):
return False
return True
class SpatialPoolerBoostTest(unittest.TestCase):
"""
Test boosting.
The test is constructed as follows: we construct a set of 5 known inputs. Two
of the input patterns have 50% overlap while all other combinations have 0%
overlap. Each input pattern has 20 bits on to ensure reasonable overlap with
almost all columns.
SP parameters: The SP is set to have 600 columns with 10% output sparsity.
This ensures that the 5 inputs cannot use up all the columns. Yet we still can
have a reasonable number of winning columns at each step in order to test
overlap properties. boostStrength is set to 10 so that some boosted columns are
guaranteed to win eventually but not necessarily quickly. potentialPct is set
to 0.9 to ensure all columns have at least some overlap with at least one
input bit. Thus, when sufficiently boosted, every column should become a
winner at some point. We set permanence increment and decrement to 0 so that
winning columns don't change unless they have been boosted.
Learning is OFF for Phase 1 & 4 and ON for Phase 2 & 3
Phase 1: Run spatial pooler on the dataset with learning off to get a baseline
The boosting factors should be all ones in this phase. A significant fraction
of the columns will not be used at all. There will be significant overlap
between the first two inputs.
Phase 2: Learning is on over the next 10 iterations. During this phase,
columns that are active frequently will have low boost factors, and columns
that are not active enough will have high boost factors. All columns should
be active at some point in phase 2.
Phase 3: Run one more batch on with learning On. Because of the artificially
induced thrashing behavior in this test due to boosting, all the inputs should
now have pretty distinct patterns.
Phase 4: Run spatial pooler with learning off. Make sure boosting factors
do not change when learning is off
"""
def setUp(self):
"""
Set various constants. Create the input patterns and the spatial pooler
"""
self.inputSize = 90
self.columnDimensions = 600
# Create a set of input vectors, x
# B,C,D don't overlap at all with other patterns
self.x = numpy.zeros((5, self.inputSize), dtype=uintType)
self.x[0, 0:20] = 1 # Input pattern A
self.x[1, 10:30] = 1 # Input pattern A' (half the bits overlap with A)
self.x[2, 30:50] = 1 # Input pattern B (no overlap with others)
self.x[3, 50:70] = 1 # Input pattern C (no overlap with others)
self.x[4, 70:90] = 1 # Input pattern D (no overlap with others)
# For each column, this will contain the last iteration number where that
# column was a winner
self.winningIteration = numpy.zeros(self.columnDimensions)
# For each input vector i, lastSDR[i] contains the most recent SDR output
# by the SP.
self.lastSDR = {}
self.spImplementation = "None"
self.sp = None
# Setup the SP creation parameters we will use
self.params = {
'inputDimensions': [self.inputSize],
'columnDimensions': [self.columnDimensions],
'potentialRadius': self.inputSize,
'potentialPct': 0.9,
'globalInhibition': True,
'numActiveColumnsPerInhArea': 60,
'synPermActiveInc': 0.0,
'synPermInactiveDec': 0.0,
'dutyCyclePeriod': 10,
'boostStrength': 10.0,
'seed': SEED,
}
print "SP seed set to:", self.params['seed']
def debugPrint(self):
"""
Helpful debug print statements while debugging this test.
"""
activeDutyCycle = numpy.zeros(self.columnDimensions, dtype=GetNTAReal())
self.sp.getActiveDutyCycles(activeDutyCycle)
boost = numpy.zeros(self.columnDimensions, dtype=GetNTAReal())
self.sp.getBoostFactors(boost)
print "\n--------- ITERATION", (
self.sp.getIterationNum() ),"-----------------------"
print "SP implementation:", self.spImplementation
print "Learning iteration:",
print "Max/min active duty cycle:", (
activeDutyCycle.max(), activeDutyCycle.min() )
print "Average non-zero active duty cycle:", (
activeDutyCycle[activeDutyCycle>0].mean() )
print "Active duty cycle", activeDutyCycle
print
print "Boost factor for sp:", boost
print
print "Last winning iteration for each column"
print self.winningIteration
print "Number of columns that have won at some point:", (
self.columnDimensions - (self.winningIteration==0).sum() )
def verifySDRProperties(self):
"""
Verify that all SDRs have the properties desired for this test.
The bounds for checking overlap are set fairly loosely here since there is
some variance due to randomness and the artificial parameters used in this
test.
"""
# Verify that all SDR's are unique
self.assertTrue(_areAllSDRsUnique(self.lastSDR), "All SDR's are not unique")
# Verify that the first two SDR's have some overlap.
self.assertGreater(_computeOverlap(self.lastSDR[0], self.lastSDR[1]), 9,
"First two SDR's don't overlap much")
# Verify the last three SDR's have low overlap with everyone else.
for i in [2, 3, 4]:
for j in range(5):
if (i!=j):
self.assertLess(_computeOverlap(self.lastSDR[i], self.lastSDR[j]),
18, "One of the last three SDRs has high overlap")
def boostTestPhase1(self):
y = numpy.zeros(self.columnDimensions, dtype = uintType)
# Do one batch through the input patterns while learning is Off
for idx, v in enumerate(self.x):
y.fill(0)
self.sp.compute(v, False, y)
self.winningIteration[y.nonzero()[0]] = self.sp.getIterationLearnNum()
self.lastSDR[idx] = y.copy()
# The boost factor for all columns should be at 1.
boost = numpy.zeros(self.columnDimensions, dtype = GetNTAReal())
self.sp.getBoostFactors(boost)
self.assertEqual((boost==1).sum(), self.columnDimensions,
"Boost factors are not all 1")
# At least half of the columns should have never been active.
self.assertGreaterEqual((self.winningIteration==0).sum(),
self.columnDimensions/2, "More than half of the columns have been active")
self.verifySDRProperties()
def boostTestPhase2(self):
y = numpy.zeros(self.columnDimensions, dtype = uintType)
    # Do 10 training batches through the input patterns
for _ in range(10):
for idx, v in enumerate(self.x):
y.fill(0)
self.sp.compute(v, True, y)
self.winningIteration[y.nonzero()[0]] = self.sp.getIterationLearnNum()
self.lastSDR[idx] = y.copy()
# All the never-active columns should have duty cycle of 0
dutyCycles = numpy.zeros(self.columnDimensions, dtype = GetNTAReal())
self.sp.getActiveDutyCycles(dutyCycles)
self.assertEqual(dutyCycles[self.winningIteration == 0].sum(), 0,
"Inactive columns have positive duty cycle.")
boost = numpy.zeros(self.columnDimensions, dtype=GetNTAReal())
self.sp.getBoostFactors(boost)
self.assertLessEqual(numpy.max(boost[numpy.where(dutyCycles>0.1)]), 1.0,
"Strongly active columns have high boost factors")
self.assertGreaterEqual(numpy.min(boost[numpy.where(dutyCycles<0.1)]), 1.0,
"Weakly active columns have low boost factors")
# By now, every column should have been sufficiently boosted to win at least
# once. The number of columns that have never won should now be 0
numLosersAfter = (self.winningIteration == 0).sum()
self.assertEqual(numLosersAfter, 0)
# Because of the artificially induced thrashing, even the first two patterns
# should have low overlap. Verify that the first two SDR's now have little
# overlap
self.assertLess(_computeOverlap(self.lastSDR[0], self.lastSDR[1]), 7,
"First two SDR's overlap significantly when they "
"shouldn't")
def boostTestPhase3(self):
    # Do one more training batch through the input patterns
y = numpy.zeros(self.columnDimensions, dtype = uintType)
for idx, v in enumerate(self.x):
y.fill(0)
self.sp.compute(v, True, y)
self.winningIteration[y.nonzero()[0]] = self.sp.getIterationLearnNum()
self.lastSDR[idx] = y.copy()
# By now, every column should have been sufficiently boosted to win at least
# once. The number of columns that have never won should now be 0
numLosersAfter = (self.winningIteration==0).sum()
self.assertEqual(numLosersAfter, 0)
# Because of the artificially induced thrashing, even the first two patterns
# should have low overlap. Verify that the first two SDR's now have little
# overlap
self.assertLess(_computeOverlap(self.lastSDR[0], self.lastSDR[1]), 7,
"First two SDR's overlap significantly when they "
"shouldn't")
def boostTestPhase4(self):
boostAtBeg = numpy.zeros(self.columnDimensions, dtype=GetNTAReal())
self.sp.getBoostFactors(boostAtBeg)
# Do one more iteration through the input patterns with learning OFF
y = numpy.zeros(self.columnDimensions, dtype=uintType)
for _, v in enumerate(self.x):
y.fill(0)
self.sp.compute(v, False, y)
boost = numpy.zeros(self.columnDimensions, dtype=GetNTAReal())
self.sp.getBoostFactors(boost)
self.assertEqual(boost.sum(), boostAtBeg.sum(),
"Boost factors changed when learning is off")
def boostTestLoop(self, imp):
"""Main test loop."""
self.sp = CreateSP(imp, self.params)
self.spImplementation = imp
self.winningIteration.fill(0)
self.lastSDR = {}
self.boostTestPhase1()
self.boostTestPhase2()
self.boostTestPhase3()
self.boostTestPhase4()
def testBoostingPY(self):
self.boostTestLoop("py")
def testBoostingCPP(self):
self.boostTestLoop("cpp")
if __name__ == "__main__":
unittest.main()
| agpl-3.0 |
mrtnrdl/.macdots | scripts/bin/platform-tools/systrace/catapult/common/py_utils/py_utils/refactor/annotated_symbol/function_definition.py | 9 | 1301 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import symbol
from py_utils.refactor.annotated_symbol import base_symbol
__all__ = [
'Function',
]
class Function(base_symbol.AnnotatedSymbol):
# pylint: disable=abstract-class-not-used
@classmethod
def Annotate(cls, symbol_type, children):
if symbol_type != symbol.stmt:
return None
compound_statement = children[0]
if compound_statement.type != symbol.compound_stmt:
return None
statement = compound_statement.children[0]
if statement.type == symbol.funcdef:
return cls(statement.type, statement.children)
elif (statement.type == symbol.decorated and
statement.children[-1].type == symbol.funcdef):
return cls(statement.type, statement.children)
else:
return None
@property
def suite(self):
# TODO: Complete.
raise NotImplementedError()
def FindChild(self, snippet_type, **kwargs):
return self.suite.FindChild(snippet_type, **kwargs)
def FindChildren(self, snippet_type):
return self.suite.FindChildren(snippet_type)
def Cut(self, child):
self.suite.Cut(child)
def Paste(self, child):
self.suite.Paste(child)
| unlicense |
rgerkin/neuroConstruct | lib/jython/Lib/modjy/modjy_log.py | 109 | 2133 | ###
#
# Copyright Alan Kennedy.
#
# You may contact the copyright holder at this uri:
#
# http://www.xhaus.com/contact/modjy
#
# The licence under which this code is released is the Apache License v2.0.
#
# The terms and conditions of this license are listed in a file contained
# in the distribution that also contained this file, under the name
# LICENSE.txt.
#
# You may also read a copy of the license at the following web address.
#
# http://modjy.xhaus.com/LICENSE.txt
#
###
import java
import sys
DEBUG = 'debug'
INFO = 'info'
WARN = 'warn'
ERROR = 'error'
FATAL = 'fatal'
levels_dict = {}
ix = 0
for level in [DEBUG, INFO, WARN, ERROR, FATAL, ]:
levels_dict[level]=ix
ix += 1
class modjy_logger:
def __init__(self, context):
self.log_ctx = context
self.format_str = "%(lvl)s:\t%(msg)s"
self.log_level = levels_dict[DEBUG]
def _log(self, level, level_str, msg, exc):
if level >= self.log_level:
msg = self.format_str % {'lvl': level_str, 'msg': msg, }
if exc:
# java.lang.System.err.println(msg, exc)
self.log_ctx.log(msg, exc)
else:
# java.lang.System.err.println(msg)
self.log_ctx.log(msg)
def debug(self, msg, exc=None):
self._log(0, DEBUG, msg, exc)
def info(self, msg, exc=None):
self._log(1, INFO, msg, exc)
def warn(self, msg, exc=None):
self._log(2, WARN, msg, exc)
def error(self, msg, exc=None):
self._log(3, ERROR, msg, exc)
def fatal(self, msg, exc=None):
self._log(4, FATAL, msg, exc)
def set_log_level(self, level_string):
try:
self.log_level = levels_dict[level_string]
except KeyError:
raise BadParameter("Invalid log level: '%s'" % level_string)
def set_log_format(self, format_string):
        # Apply the new format string, then emit a test message so that a bad
        # format is detected immediately and the previous format is restored.
        old_format_str = self.format_str
        self.format_str = format_string
        try:
            self._log(0, DEBUG, "This is a log formatting test", None)
        except (KeyError, ValueError, TypeError):
            self.format_str = old_format_str
            raise BadParameter("Bad format string: '%s'" % format_string)
| gpl-2.0 |
BizzCloud/PosBox | addons/mrp_repair/__openerp__.py | 65 | 2540 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Repairs Management',
'version': '1.0',
'category': 'Manufacturing',
'description': """
The aim is to have a complete module to manage all products repairs.
====================================================================
The following topics should be covered by this module:
------------------------------------------------------
* Add/remove products in the reparation
* Impact for stocks
* Invoicing (products and/or services)
* Warranty concept
* Repair quotation report
* Notes for the technician and for the final customer
""",
'author': 'OpenERP SA',
'images': ['images/repair_order.jpeg'],
'depends': ['mrp', 'sale', 'account'],
'data': [
'security/ir.model.access.csv',
'security/mrp_repair_security.xml',
'mrp_repair_data.xml',
'mrp_repair_sequence.xml',
'wizard/mrp_repair_cancel_view.xml',
'wizard/mrp_repair_make_invoice_view.xml',
'mrp_repair_view.xml',
'mrp_repair_workflow.xml',
'mrp_repair_report.xml',
'views/report_mrprepairorder.xml',
],
'demo': ['mrp_repair_demo.yml'],
'test': ['test/mrp_repair_users.yml',
'test/test_mrp_repair_noneinv.yml',
'test/test_mrp_repair_b4inv.yml',
'test/test_mrp_repair_afterinv.yml',
'test/test_mrp_repair_cancel.yml',
'test/test_mrp_repair_fee.yml',
],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
raphaelmerx/django-rest-framework | rest_framework/utils/field_mapping.py | 39 | 9402 | """
Helper functions for mapping model fields to a dictionary of default
keyword arguments that should be used for their equivalent serializer fields.
"""
import inspect
from django.core import validators
from django.db import models
from django.utils.text import capfirst
from rest_framework.compat import clean_manytomany_helptext
from rest_framework.validators import UniqueValidator
NUMERIC_FIELD_TYPES = (
models.IntegerField, models.FloatField, models.DecimalField
)
class ClassLookupDict(object):
"""
Takes a dictionary with classes as keys.
    Lookups against this object will traverse the object's inheritance
    hierarchy in method resolution order, and return the first matching value
from the dictionary or raises a KeyError if nothing matches.
"""
def __init__(self, mapping):
self.mapping = mapping
def __getitem__(self, key):
if hasattr(key, '_proxy_class'):
# Deal with proxy classes. Ie. BoundField behaves as if it
# is a Field instance when using ClassLookupDict.
base_class = key._proxy_class
else:
base_class = key.__class__
for cls in inspect.getmro(base_class):
if cls in self.mapping:
return self.mapping[cls]
raise KeyError('Class %s not found in lookup.' % base_class.__name__)
def __setitem__(self, key, value):
self.mapping[key] = value
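# Usage sketch (illustrative, not part of the public API): given
#
#   lookup = ClassLookupDict({models.Field: 'generic', models.CharField: 'char'})
#
# lookup[models.SlugField()] resolves to 'char' because SlugField subclasses
# CharField, while lookup[models.IntegerField()] falls back to 'generic' via
# the MRO walk above.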
def needs_label(model_field, field_name):
"""
Returns `True` if the label based on the model's verbose name
    is not equal to the default label it would have based on its field name.
"""
default_label = field_name.replace('_', ' ').capitalize()
return capfirst(model_field.verbose_name) != default_label
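# For example (illustrative): a field named 'user_email' with verbose_name
# 'user email' does not need a label ('User email' is already the default),
# whereas verbose_name 'E-mail address' does.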
def get_detail_view_name(model):
"""
Given a model class, return the view name to use for URL relationships
that refer to instances of the model.
"""
return '%(model_name)s-detail' % {
'app_label': model._meta.app_label,
'model_name': model._meta.object_name.lower()
}
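# For example (illustrative): a model whose class name is 'Product' yields the
# view name 'product-detail'.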
def get_field_kwargs(field_name, model_field):
"""
    Creates the default keyword arguments for a basic non-relational field.
"""
kwargs = {}
validator_kwarg = list(model_field.validators)
# The following will only be used by ModelField classes.
# Gets removed for everything else.
kwargs['model_field'] = model_field
if model_field.verbose_name and needs_label(model_field, field_name):
kwargs['label'] = capfirst(model_field.verbose_name)
if model_field.help_text:
kwargs['help_text'] = model_field.help_text
max_digits = getattr(model_field, 'max_digits', None)
if max_digits is not None:
kwargs['max_digits'] = max_digits
decimal_places = getattr(model_field, 'decimal_places', None)
if decimal_places is not None:
kwargs['decimal_places'] = decimal_places
if isinstance(model_field, models.TextField):
kwargs['style'] = {'base_template': 'textarea.html'}
if isinstance(model_field, models.AutoField) or not model_field.editable:
# If this field is read-only, then return early.
# Further keyword arguments are not valid.
kwargs['read_only'] = True
return kwargs
if model_field.has_default() or model_field.blank or model_field.null:
kwargs['required'] = False
if model_field.null and not isinstance(model_field, models.NullBooleanField):
kwargs['allow_null'] = True
if model_field.blank and (isinstance(model_field, models.CharField) or
isinstance(model_field, models.TextField)):
kwargs['allow_blank'] = True
if model_field.choices:
# If this model field contains choices, then return early.
# Further keyword arguments are not valid.
kwargs['choices'] = model_field.choices
return kwargs
# Ensure that max_length is passed explicitly as a keyword arg,
# rather than as a validator.
max_length = getattr(model_field, 'max_length', None)
if max_length is not None and isinstance(model_field, models.CharField):
kwargs['max_length'] = max_length
validator_kwarg = [
validator for validator in validator_kwarg
if not isinstance(validator, validators.MaxLengthValidator)
]
# Ensure that min_length is passed explicitly as a keyword arg,
# rather than as a validator.
min_length = next((
validator.limit_value for validator in validator_kwarg
if isinstance(validator, validators.MinLengthValidator)
), None)
if min_length is not None and isinstance(model_field, models.CharField):
kwargs['min_length'] = min_length
validator_kwarg = [
validator for validator in validator_kwarg
if not isinstance(validator, validators.MinLengthValidator)
]
# Ensure that max_value is passed explicitly as a keyword arg,
# rather than as a validator.
max_value = next((
validator.limit_value for validator in validator_kwarg
if isinstance(validator, validators.MaxValueValidator)
), None)
if max_value is not None and isinstance(model_field, NUMERIC_FIELD_TYPES):
kwargs['max_value'] = max_value
validator_kwarg = [
validator for validator in validator_kwarg
if not isinstance(validator, validators.MaxValueValidator)
]
    # Ensure that min_value is passed explicitly as a keyword arg,
# rather than as a validator.
min_value = next((
validator.limit_value for validator in validator_kwarg
if isinstance(validator, validators.MinValueValidator)
), None)
if min_value is not None and isinstance(model_field, NUMERIC_FIELD_TYPES):
kwargs['min_value'] = min_value
validator_kwarg = [
validator for validator in validator_kwarg
if not isinstance(validator, validators.MinValueValidator)
]
# URLField does not need to include the URLValidator argument,
# as it is explicitly added in.
if isinstance(model_field, models.URLField):
validator_kwarg = [
validator for validator in validator_kwarg
if not isinstance(validator, validators.URLValidator)
]
# EmailField does not need to include the validate_email argument,
# as it is explicitly added in.
if isinstance(model_field, models.EmailField):
validator_kwarg = [
validator for validator in validator_kwarg
if validator is not validators.validate_email
]
    # SlugField does not need to include the 'validate_slug' argument,
if isinstance(model_field, models.SlugField):
validator_kwarg = [
validator for validator in validator_kwarg
if validator is not validators.validate_slug
]
    # IPAddressField does not need to include the 'validate_ipv46_address' argument,
if isinstance(model_field, models.GenericIPAddressField):
validator_kwarg = [
validator for validator in validator_kwarg
if validator is not validators.validate_ipv46_address
]
if getattr(model_field, 'unique', False):
validator = UniqueValidator(queryset=model_field.model._default_manager)
validator_kwarg.append(validator)
if validator_kwarg:
kwargs['validators'] = validator_kwarg
return kwargs
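# Illustrative sketch (added comment; the concrete field is an assumption, not taken
# from the original source): for a model field declared as
#     name = models.CharField(max_length=100, blank=True)
# get_field_kwargs('name', field) would typically return something like
#     {'model_field': field, 'required': False, 'allow_blank': True, 'max_length': 100}
# -- the MaxLengthValidator is dropped from 'validators' because max_length is
# passed explicitly instead.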
def get_relation_kwargs(field_name, relation_info):
"""
Creates a default instance of a flat relational field.
"""
model_field, related_model, to_many, has_through_model = relation_info
kwargs = {
'queryset': related_model._default_manager,
'view_name': get_detail_view_name(related_model)
}
if to_many:
kwargs['many'] = True
if has_through_model:
kwargs['read_only'] = True
kwargs.pop('queryset', None)
if model_field:
if model_field.verbose_name and needs_label(model_field, field_name):
kwargs['label'] = capfirst(model_field.verbose_name)
help_text = clean_manytomany_helptext(model_field.help_text)
if help_text:
kwargs['help_text'] = help_text
if not model_field.editable:
kwargs['read_only'] = True
kwargs.pop('queryset', None)
if kwargs.get('read_only', False):
# If this field is read-only, then return early.
# No further keyword arguments are valid.
return kwargs
if model_field.has_default() or model_field.blank or model_field.null:
kwargs['required'] = False
if model_field.null:
kwargs['allow_null'] = True
if model_field.validators:
kwargs['validators'] = model_field.validators
if getattr(model_field, 'unique', False):
validator = UniqueValidator(queryset=model_field.model._default_manager)
kwargs['validators'] = kwargs.get('validators', []) + [validator]
if to_many and not model_field.blank:
kwargs['allow_empty'] = False
return kwargs
def get_nested_relation_kwargs(relation_info):
kwargs = {'read_only': True}
if relation_info.to_many:
kwargs['many'] = True
return kwargs
def get_url_kwargs(model_field):
return {
'view_name': get_detail_view_name(model_field)
}
| bsd-2-clause |
sodafree/backend | build/ipython/build/lib.linux-i686-2.7/IPython/core/macro.py | 3 | 1942 | """Support for interactive macros in IPython"""
#*****************************************************************************
# Copyright (C) 2001-2005 Fernando Perez <[email protected]>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#*****************************************************************************
import re
import sys
from IPython.utils import py3compat
from IPython.utils.encoding import DEFAULT_ENCODING
coding_declaration = re.compile(r"#\s*coding[:=]\s*([-\w.]+)")
class Macro(object):
"""Simple class to store the value of macros as strings.
Macro is just a callable that executes a string of IPython
input when called.
Args to macro are available in _margv list if you need them.
"""
def __init__(self,code):
"""store the macro value, as a single string which can be executed"""
lines = []
enc = None
for line in code.splitlines():
coding_match = coding_declaration.match(line)
if coding_match:
enc = coding_match.group(1)
else:
lines.append(line)
code = "\n".join(lines)
if isinstance(code, bytes):
code = code.decode(enc or DEFAULT_ENCODING)
self.value = code + '\n'
def __str__(self):
return py3compat.unicode_to_str(self.value)
def __unicode__(self):
return self.value
def __repr__(self):
return 'IPython.macro.Macro(%s)' % repr(self.value)
def __getstate__(self):
""" needed for safe pickling via %store """
return {'value': self.value}
def __add__(self, other):
if isinstance(other, Macro):
return Macro(self.value + other.value)
elif isinstance(other, basestring):
return Macro(self.value + other)
raise TypeError
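# Illustrative usage sketch (added comment, not part of the original file):
#
#   >>> m = Macro("# coding: utf-8\nprint('hi')\n")
#   >>> m.value            # the coding declaration line is stripped
#   "print('hi')\n"
#   >>> (m + Macro("x = 1\n")).value
#   "print('hi')\nx = 1\n"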
| bsd-3-clause |
Sjc1000/PyRC | UI/Disabled/FriendsList.py | 1 | 2227 | #!/usr/bin/env python3
from gi.repository import Gtk, Gdk
import json
class FriendsList():
servers = {}
active_server = None
def __init__(self, MainWindow):
self.MainWindow = MainWindow
self.position = [8, 5, 1, 4]
def prebuild(self):
self.MainWindow.ui_plugins['UserList'].position = (8, 0, 1, 5)
return None
def build(self):
self.scroll_window = Gtk.ScrolledWindow()
self.list = Gtk.ListStore(str, str)
self.view = Gtk.TreeView(self.list)
self.view.set_activate_on_single_click(True)
self.view.set_hexpand(True)
self.view.connect('row-activated', self.clicked)
text_render = Gtk.CellRendererText()
username = Gtk.TreeViewColumn('Friends', text_render, text=0, foreground=1)
self.view.append_column(username)
self.scroll_window.add(self.view)
self.MainWindow.grid.attach(self.scroll_window, *self.position)
return None
def clicked(self, TreeView, TreePath, TreeViewColumn):
print('User list clicked')
return None
def add_friend(self, connection, nickname):
connection.send('MONITOR + ' + nickname)
self.servers[connection.server]['friends'][nickname] = {'iter': None, 'online': False}
if connection.server == self.active_server:
iter = self.list.append([nickname, 'grey'])
self.servers[connection.server]['friends'][nickname]['iter'] = iter
return None
def activate_path(self, server, channel, clicked=False):
self.active_server = server
#redraw
return None
def on376(self, connection, *junk):
with open('UI/friends.json', 'r') as ffile:
friends = json.loads(ffile.read())
if connection.server not in friends:
return None
self.servers[connection.server] = {'friends': {}}
for nickname in sorted(friends[connection.server]):
self.add_friend(connection, nickname)
connection.send('MONITOR s')
return None
def on730(self, connection, host, nickname, uhost):
if nickname == connection.nickname:
return None
print( uhost )
return None | gpl-2.0 |
JonatanAntoni/CMSIS_5 | CMSIS/DSP/PythonWrapper/testdsp2.py | 2 | 9453 | import cmsisdsp as dsp
import numpy as np
from scipy import signal
from scipy.fftpack import dct
import fixedpoint as f
from pyquaternion import Quaternion
import colorama
from colorama import init,Fore, Back, Style
import statsmodels.tsa.stattools
import scipy.spatial
init()
def printTitle(s):
print("\n" + Fore.GREEN + Style.BRIGHT + s + Style.RESET_ALL)
def printSubTitle(s):
print("\n" + Style.BRIGHT + s + Style.RESET_ALL)
def imToReal2D(a):
ar=np.zeros(np.array(a.shape) * [1,2])
ar[::,0::2]=a.real
ar[::,1::2]=a.imag
return(ar)
def realToIm2D(ar):
return(ar[::,0::2] + 1j * ar[::,1::2])
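# Note (added for clarity): imToReal2D interleaves real and imaginary parts column
# by column, e.g. [[1+2j, 3+4j]] becomes [[1., 2., 3., 4.]], and realToIm2D inverts
# that layout. This interleaved format is what the complex matrix calls further down
# in this script are given as input.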
def normalize(a):
return(a/np.max(np.abs(a)))
def autocorr(x):
result = np.correlate(x, x, mode='full')
return result[result.size//2:]
#################### MAX AND ABSMAX ##################################
printTitle("Max and AbsMax")
a=np.array([1.,-3.,4.,0.,-10.,8.])
printSubTitle("Float tests")
i=dsp.arm_max_f32(a)
print(i)
i=dsp.arm_absmax_f32(a)
print(i)
printSubTitle("Fixed point tests")
# Normalize for fixed point tests
a = a / i[0]
a31 = f.toQ31(a)
i=dsp.arm_absmax_q31(a31)
print(f.Q31toF32(i[0]),i[1])
a8 = f.toQ15(a)
i=dsp.arm_absmax_q15(a8)
print(f.Q15toF32(i[0]),i[1])
a7 = f.toQ7(a)
i=dsp.arm_absmax_q7(a7)
print(f.Q7toF32(i[0]),i[1])
################### MIN AND ABSMIN ################################
printTitle("Min and AbsMin")
a=np.array([1.,-3.,4.,0.5,-10.,8.])
printSubTitle("Float tests")
i=dsp.arm_min_f32(a)
print(i)
i=dsp.arm_absmin_f32(a)
print(i)
printSubTitle("Fixed point tests")
# Normalize for fixed point tests
idx=i[1]
i=dsp.arm_absmax_f32(a)
a = a / i[0]
print(a)
print(a[idx])
a31 = f.toQ31(a)
i=dsp.arm_absmin_q31(a31)
print(f.Q31toF32(i[0]),i[1])
a8 = f.toQ15(a)
i=dsp.arm_absmin_q15(a8)
print(f.Q15toF32(i[0]),i[1])
a7 = f.toQ7(a)
i=dsp.arm_absmin_q7(a7)
print(f.Q7toF32(i[0]),i[1])
##################### CLIPPING ###################
printTitle("Clipping tests tests")
a=np.array([1.,-3.,4.,0.5,-10.,8.])
i=dsp.arm_absmax_f32(a)
minBound =-5.0
maxBound =6.0
b=dsp.arm_clip_f32(a,minBound,maxBound)
print(a)
print(b)
a = a / i[0]
print(a)
minBound = minBound / i[0]
maxBound = maxBound / i[0]
print(minBound,maxBound)
b=dsp.arm_clip_q31(f.toQ31(a),f.toQ31(minBound),f.toQ31(maxBound))
print(f.Q31toF32(b))
b=dsp.arm_clip_q15(f.toQ15(a),f.toQ15(minBound),f.toQ15(maxBound))
print(f.Q15toF32(b))
b=dsp.arm_clip_q7(f.toQ7(a),f.toQ7(minBound),f.toQ7(maxBound))
print(f.Q7toF32(b))
############### MAT VECTOR MULT
printTitle("Matrix x Vector")
a=np.array([[1.,2,3,4],[5,6,7,8],[9,10,11,12]])
b=np.array([-2,-1,3,4])
c = np.dot(a,b)
print(c)
c = dsp.arm_mat_vec_mult_f32(a,b)
print(c)
printSubTitle("Fixed point")
normalizationFactor=2.0*np.sqrt(np.max(np.abs(c)))
a=a/normalizationFactor
b=b/normalizationFactor
print(np.dot(a,b))
c=dsp.arm_mat_vec_mult_q31(f.toQ31(a),f.toQ31(b))
print(f.Q31toF32(c))
c=dsp.arm_mat_vec_mult_q15(f.toQ15(a),f.toQ15(b))
print(f.Q15toF32(c))
c=dsp.arm_mat_vec_mult_q7(f.toQ7(a),f.toQ7(b))
print(f.Q7toF32(c))
############### MATRIX MULTIPLY
printTitle("Matrix x Matrix")
a=np.array([[1.,2,3,4],[5,6,7,8],[9,10,11,12]])
b=np.array([[1.,2,3],[5.1,6,7],[9.1,10,11],[5,8,4]])
print(np.dot(a , b))
c=dsp.arm_mat_mult_f32(a,b)
print(c[1])
printSubTitle("Fixed point")
normalizationFactor=2.0*np.sqrt(np.max(np.abs(c[1])))
a = a / normalizationFactor
b = b / normalizationFactor
c=dsp.arm_mat_mult_f32(a,b)
print(c[1])
print("")
af = f.toQ31(a)
bf = f.toQ31(b)
c = dsp.arm_mat_mult_q31(af,bf)
print(f.Q31toF32(c[1]))
print("")
af = f.toQ15(a)
bf = f.toQ15(b)
s=bf.shape
nb=s[0]*s[1]
tmp=np.zeros(nb)
c = dsp.arm_mat_mult_q15(af,bf,tmp)
print(f.Q15toF32(c[1]))
print("")
af = f.toQ7(a)
bf = f.toQ7(b)
s=bf.shape
nb=s[0]*s[1]
tmp=np.zeros(nb)
c = dsp.arm_mat_mult_q7(af,bf,tmp)
print(f.Q7toF32(c[1]))
################# MAT TRANSPOSE #################
printTitle("Transposition")
a=np.array([[1.,2,3,4],[5,6,7,8],[9,10,11,12]])
normalizationFactor=np.max(np.abs(c[1]))
a = a / normalizationFactor
print(np.transpose(a))
print("")
r=dsp.arm_mat_trans_f32(a)
print(r[1])
print("")
r=dsp.arm_mat_trans_q31(f.toQ31(a))
print(f.Q31toF32(r[1]))
print("")
r=dsp.arm_mat_trans_q15(f.toQ15(a))
print(f.Q15toF32(r[1]))
print("")
r=dsp.arm_mat_trans_q7(f.toQ7(a))
print(f.Q7toF32(r[1]))
print("")
################## FILL FUNCTIONS #################
v=0.22
nb=10
a=np.full((nb,),v)
print(a)
a=dsp.arm_fill_f32(v,nb)
print(a)
a=f.Q31toF32(dsp.arm_fill_q31(f.toQ31(v),nb))
print(a)
a=f.Q15toF32(dsp.arm_fill_q15(f.toQ15(v),nb))
print(a)
a=f.Q7toF32(dsp.arm_fill_q7(f.toQ7(v),nb))
print(a)
################# COMPLEX MAT TRANSPOSE #################
printTitle("Complex Transposition")
a=np.array([[1. + 0.0j ,2 + 1.0j,3 + 0.0j,4 + 2.0j],
[5 + 1.0j,6 + 2.0j,7 + 3.0j,8 + 1.0j],
[9 - 2.0j,10 + 1.0j,11 - 4.0j,12 + 1.0j]])
normalizationFactor=np.max(np.abs(c[1]))
a = a / normalizationFactor
print(np.transpose(a))
print("")
r=dsp.arm_mat_cmplx_trans_f32(imToReal2D(a))
print(realToIm2D(r[1]))
print("")
r=dsp.arm_mat_cmplx_trans_q31(f.toQ31(imToReal2D(a)))
print(realToIm2D(f.Q31toF32(r[1])))
print("")
r=dsp.arm_mat_cmplx_trans_q15(f.toQ15(imToReal2D(a)))
print(realToIm2D(f.Q15toF32(r[1])))
print("")
################ Levinson ##################
printTitle("Levinson Durbin")
na=5
s = np.random.randn(na+1)
s = normalize(s)
phi = autocorr(s)
phi = normalize(phi)
sigmav,arcoef,pacf,sigma,phi1=statsmodels.tsa.stattools.levinson_durbin(phi,nlags=na,isacov=True)
print(arcoef)
print(sigmav)
(a,err)=dsp.arm_levinson_durbin_f32(phi,na)
print(a)
print(err)
phiQ31 = f.toQ31(phi)
(aQ31,errQ31)=dsp.arm_levinson_durbin_q31(phiQ31,na)
print(f.Q31toF32(aQ31))
print(f.Q31toF32(errQ31))
################## Bitwise operations #################
printTitle("Bitwise operations")
def genBitvectors(nb,format):
if format == 31:
maxVal = 0x7fffffff
if format == 15:
maxVal = 0x7fff
if format == 7:
maxVal = 0x7f
minVal = -maxVal-1
return(np.random.randint(minVal, maxVal, size=nb))
NBSAMPLES=10
printSubTitle("u32")
su32A=genBitvectors(NBSAMPLES,31)
su32B=genBitvectors(NBSAMPLES,31)
ffff = (np.ones(NBSAMPLES)*(-1)).astype(np.int)
ref=np.bitwise_and(su32A, su32B)
#print(ref)
result=dsp.arm_and_u32(su32A, su32B).astype(int)
print(result-ref)
ref=np.bitwise_or(su32A, su32B)
#print(ref)
result=dsp.arm_or_u32(su32A, su32B).astype(int)
print(result-ref)
ref=np.bitwise_xor(su32A, su32B)
#print(ref)
result=dsp.arm_xor_u32(su32A, su32B).astype(int)
print(result-ref)
ref=np.bitwise_xor(ffff, su32A)
#print(ref)
result=dsp.arm_not_u32(su32A).astype(int)
print(result-ref)
printSubTitle("u16")
su16A=genBitvectors(NBSAMPLES,15)
su16B=genBitvectors(NBSAMPLES,15)
ffff = (np.ones(NBSAMPLES)*(-1)).astype(np.int)
ref=np.bitwise_and(su16A, su16B)
#print(ref)
result=dsp.arm_and_u16(su16A, su16B).astype(np.short)
print(result-ref)
ref=np.bitwise_or(su16A, su16B)
#print(ref)
result=dsp.arm_or_u16(su16A, su16B).astype(np.short)
print(result-ref)
ref=np.bitwise_xor(su16A, su16B)
#print(ref)
result=dsp.arm_xor_u16(su16A, su16B).astype(np.short)
print(result-ref)
ref=np.bitwise_xor(ffff, su16A)
#print(ref)
result=dsp.arm_not_u16(su16A).astype(np.short)
print(result-ref)
printSubTitle("u8")
su8A=genBitvectors(NBSAMPLES,7)
su8B=genBitvectors(NBSAMPLES,7)
ref=np.bitwise_and(su8A, su8B)
#print(ref)
result=dsp.arm_and_u8(su8A, su8B).astype(np.byte)
print(result-ref)
ref=np.bitwise_or(su8A, su8B)
#print(ref)
result=dsp.arm_or_u8(su8A, su8B).astype(np.byte)
print(result-ref)
ref=np.bitwise_xor(su8A, su8B)
#print(ref)
result=dsp.arm_xor_u8(su8A, su8B).astype(np.byte)
print(result-ref)
ref=np.bitwise_xor(ffff, su8A)
#print(ref)
result=dsp.arm_not_u8(su8A).astype(np.byte)
print(result-ref)
#################### Quaternion tests ##################
NBSAMPLES=3
def flattenQuat(l):
return(np.array([list(x) for x in l]).reshape(4*len(l)))
def flattenRot(l):
return(np.array([list(x) for x in l]).reshape(9*len(l)))
# q and -q represent the same rotation,
# so there is an ambiguity in the tests.
# We force the real part to be positive.
def mkQuaternion(mat):
q=Quaternion(matrix=mat)
if q.scalar < 0:
return(-q)
else:
return(q)
a=[2.0*Quaternion.random() for x in range(NBSAMPLES)]
src=flattenQuat(a)
res=flattenQuat([x.normalised for x in a])
print(res)
output=dsp.arm_quaternion_normalize_f32(src)
print(output)
print("")
res=flattenQuat([x.conjugate for x in a])
print(res)
output=dsp.arm_quaternion_conjugate_f32(src)
print(output)
print("")
res=flattenQuat([x.inverse for x in a])
print(res)
output=dsp.arm_quaternion_inverse_f32(src)
print(output)
print("")
res=[x.norm for x in a]
print(res)
output=dsp.arm_quaternion_norm_f32(src)
print(output)
print("")
a=[x.normalised for x in a]
ra=[x.rotation_matrix for x in a]
rb=[mkQuaternion(x) for x in ra]
srca=flattenQuat(a)
resa=dsp.arm_quaternion2rotation_f32(srca)
resb=dsp.arm_rotation2quaternion_f32(resa)
print(ra)
print(resa)
print("")
print(rb)
print(resb)
a=[2.0*Quaternion.random() for x in range(NBSAMPLES)]
b=[2.0*Quaternion.random() for x in range(NBSAMPLES)]
c = np.array(a) * np.array(b)
print(c)
srca=flattenQuat(a)
srcb=flattenQuat(b)
resc=dsp.arm_quaternion_product_f32(srca,srcb)
print(resc)
print(a[0]*b[0])
res=dsp.arm_quaternion_product_single_f32(srca[0:4],srcb[0:4])
print(res)
| apache-2.0 |
tsgit/invenio | modules/miscutil/lib/upgrades/invenio_2013_03_18_aidPERSONIDDATA_last_updated.py | 18 | 1694 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import warnings
from invenio.dbquery import run_sql
from invenio.textutils import wait_for_user
depends_on = ['invenio_release_1_1_0']
def info():
return "Introduces aidPERSONIDDATA last_updated column and new table indexes"
def do_upgrade():
column_exists = run_sql("SHOW COLUMNS FROM `aidPERSONIDDATA` LIKE 'last_updated'")
if not column_exists:
run_sql("""
ALTER TABLE aidPERSONIDDATA
ADD COLUMN last_updated TIMESTAMP ON UPDATE CURRENT_TIMESTAMP NOT NULL
DEFAULT CURRENT_TIMESTAMP AFTER opt3,
ADD INDEX `timestamp-b` (`last_updated`)
""")
indexes = [i[2] for i in run_sql('SHOW INDEX FROM aidPERSONIDPAPERS')]
if 'personid-flag-b' not in indexes:
run_sql("""
ALTER TABLE aidPERSONIDPAPERS
ADD INDEX `personid-flag-b` (`personid`, `flag`)
""")
def estimate():
return 1
| gpl-2.0 |
Kongsea/tensorflow | tensorflow/contrib/tensorboard/plugins/trace/trace_test.py | 124 | 2749 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.contrib.tensorboard.plugins.trace package."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tempfile
from google.protobuf import json_format
from tensorflow.contrib.tensorboard.plugins import trace
from tensorflow.python.framework import constant_op
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
class TraceTest(test.TestCase):
def setUp(self):
self._temp_dir = tempfile.mkdtemp()
self._temp_trace_json = self._temp_dir + 'trace.json'
def tearDown(self):
gfile.DeleteRecursively(self._temp_dir)
def testEmptyGraph(self):
trace_info = self._store_and_read_trace_info()
self.assertEqual(len(trace_info.ops), 0)
def testHasSourceCodeOfThisFile(self):
constant_op.constant(0)
trace_info = self._store_and_read_trace_info()
self.assertTrue(trace_info.files)
for file_info in trace_info.files:
if file_info.file_path.endswith('trace_test.py'):
return
self.fail('trace_test file not found in the trace info json')
def testHasTheConstantOp(self):
constant_op.constant(0)
trace_info = self._store_and_read_trace_info()
self.assertTrue(trace_info.ops)
for op in trace_info.ops:
if op.op_type == 'Const':
return
self.fail('Could not find operation of type `Const` in the graph')
def testMultilineStatements(self):
source = """def test():
a(4,
3,
1)
b(3, 4, 5)
c((4, 3),
(),
)
"""
line2start = trace.find_multiline_statements(source)
self.assertEqual(line2start[3], 1)
self.assertEqual(line2start[9], 7)
self.assertEqual(len(line2start), 2)
def _store_and_read_trace_info(self):
trace.store_trace_info(self._temp_trace_json)
trace_info = trace.TraceInfo()
with gfile.Open(self._temp_trace_json) as f:
text = f.read()
json_format.Parse(text, trace_info)
return trace_info
if __name__ == '__main__':
test.main()
| apache-2.0 |
xychu/product-definition-center | pdc/apps/compose/lib.py | 3 | 12345 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import os
import json
import kobo
from django.db import transaction, connection
from django.db.models import Q
from rest_framework import serializers
from pdc.apps.package.models import RPM
from pdc.apps.common import hacks as common_hacks
from pdc.apps.common import models as common_models
from pdc.apps.package import models as package_models
from pdc.apps.repository import models as repository_models
from pdc.apps.release import models as release_models
from pdc.apps.release import lib
from pdc.apps.compose import models
from pdc.apps.release.models import Release
from pdc.apps.component.models import ReleaseComponent
def _maybe_raise_inconsistency_error(composeinfo, manifest, name):
"""Raise ValidationError if compose id is not the same in both files.
The name should describe the kind of manifest.
"""
if composeinfo.compose.id != manifest.compose.id:
raise serializers.ValidationError(
{'detail': ['Inconsistent data: different compose id in composeinfo and {0} file.'.format(name)]})
def get_or_insert_rpm(rpms_in_db, cursor, rpm_nevra, srpm_nevra, filename):
rpm_id = rpms_in_db.get(rpm_nevra, None)
if not rpm_id:
rpm_id = package_models.RPM.bulk_insert(cursor, rpm_nevra, filename, srpm_nevra)
rpms_in_db[rpm_nevra] = rpm_id
return rpm_id
def insert_compose_rpms_if_nonexist(compose_rpms_in_db, cursor,
variant_arch_id, rpm_id,
content_category_id, sigkey_id, path_id):
key = "%s/%s" % (variant_arch_id, rpm_id)
if key not in compose_rpms_in_db:
models.ComposeRPM.bulk_insert(cursor,
variant_arch_id,
rpm_id,
content_category_id,
sigkey_id,
path_id)
compose_rpms_in_db.add(key)
def _link_compose_to_integrated_product(request, compose, variant):
"""
If the variant belongs to an integrated layered product, update the compose
so that it is linked to the release for that product. Note that the variant
argument should be variant retrieved from compose info, not a PDC model.
"""
release = variant.release
if release.name:
integrated_from_release = lib.get_or_create_integrated_release(
request,
compose.release,
release
)
compose.linked_releases.add(integrated_from_release)
def _add_compose_create_msg(request, compose_obj):
"""
Add compose create message to request._messagings.
"""
msg = {'action': 'create',
'compose_id': compose_obj.compose_id,
'compose_date': compose_obj.compose_date.isoformat(),
'compose_type': compose_obj.compose_type.name,
'compose_respin': compose_obj.compose_respin}
request._request._messagings.append(('.compose', json.dumps(msg)))
@transaction.atomic
def compose__import_rpms(request, release_id, composeinfo, rpm_manifest):
release_obj = release_models.Release.objects.get(release_id=release_id)
ci = common_hacks.deserialize_composeinfo(composeinfo)
rm = common_hacks.deserialize_rpms(rpm_manifest)
_maybe_raise_inconsistency_error(ci, rm, 'rpms')
compose_date = "%s-%s-%s" % (ci.compose.date[:4], ci.compose.date[4:6], ci.compose.date[6:])
compose_type = models.ComposeType.objects.get(name=ci.compose.type)
acceptance_status = models.ComposeAcceptanceTestingState.objects.get(name='untested')
compose_obj, created = lib._logged_get_or_create(
request, models.Compose,
release=release_obj,
compose_id=ci.compose.id,
compose_date=compose_date,
compose_type=compose_type,
compose_respin=ci.compose.respin,
compose_label=ci.compose.label or None,
acceptance_testing=acceptance_status,
)
if created and hasattr(request._request, '_messagings'):
# add message
_add_compose_create_msg(request, compose_obj)
rpms_in_db = {}
qs = package_models.RPM.objects.all()
for rpm in qs.iterator():
key = "%s-%s:%s-%s.%s" % (rpm.name, rpm.epoch, rpm.version, rpm.release, rpm.arch)
rpms_in_db[key] = rpm.id
cursor = connection.cursor()
add_to_changelog = []
imported_rpms = 0
for variant in ci.get_variants(recursive=True):
_link_compose_to_integrated_product(request, compose_obj, variant)
variant_type = release_models.VariantType.objects.get(name=variant.type)
variant_obj, created = models.Variant.objects.get_or_create(
compose=compose_obj,
variant_id=variant.id,
variant_uid=variant.uid,
variant_name=variant.name,
variant_type=variant_type
)
if created:
add_to_changelog.append(variant_obj)
for arch in variant.arches:
arch_obj = common_models.Arch.objects.get(name=arch)
var_arch_obj, _ = models.VariantArch.objects.get_or_create(arch=arch_obj,
variant=variant_obj)
compose_rpms_in_db = set()
qs = models.ComposeRPM.objects.filter(variant_arch=var_arch_obj).values_list('variant_arch_id',
'rpm_id')
for (variant_arch_id, rpm_id) in qs.iterator():
key = "%s/%s" % (variant_arch_id, rpm_id)
compose_rpms_in_db.add(key)
sources = set()
for srpm_nevra, rpms in rm.rpms.get(variant.uid, {}).get(arch, {}).iteritems():
sources.add(srpm_nevra)
for rpm_nevra, rpm_data in rpms.iteritems():
imported_rpms += 1
path, filename = os.path.split(rpm_data['path'])
rpm_id = get_or_insert_rpm(rpms_in_db, cursor, rpm_nevra, srpm_nevra, filename)
sigkey_id = common_models.SigKey.get_cached_id(rpm_data["sigkey"], create=True)
path_id = models.Path.get_cached_id(path, create=True)
content_category = rpm_data["category"]
content_category_id = repository_models.ContentCategory.get_cached_id(content_category)
insert_compose_rpms_if_nonexist(compose_rpms_in_db, cursor,
var_arch_obj.id, rpm_id,
content_category_id, sigkey_id, path_id)
for obj in add_to_changelog:
lib._maybe_log(request, True, obj)
request.changeset.add('notice', 0, 'null',
json.dumps({
'compose': compose_obj.compose_id,
'num_linked_rpms': imported_rpms,
}))
@transaction.atomic
def compose__import_images(request, release_id, composeinfo, image_manifest):
release_obj = release_models.Release.objects.get(release_id=release_id)
ci = common_hacks.deserialize_composeinfo(composeinfo)
im = common_hacks.deserialize_images(image_manifest)
_maybe_raise_inconsistency_error(ci, im, 'images')
compose_date = "%s-%s-%s" % (ci.compose.date[:4], ci.compose.date[4:6], ci.compose.date[6:])
compose_type = models.ComposeType.objects.get(name=ci.compose.type)
compose_obj, created = lib._logged_get_or_create(
request, models.Compose,
release=release_obj,
compose_id=ci.compose.id,
compose_date=compose_date,
compose_type=compose_type,
compose_respin=ci.compose.respin,
compose_label=ci.compose.label or None,
)
if created and hasattr(request._request, '_messagings'):
# add message
_add_compose_create_msg(request, compose_obj)
add_to_changelog = []
imported_images = 0
for variant in ci.get_variants(recursive=True):
_link_compose_to_integrated_product(request, compose_obj, variant)
variant_type = release_models.VariantType.objects.get(name=variant.type)
variant_obj, created = models.Variant.objects.get_or_create(
compose=compose_obj,
variant_id=variant.id,
variant_uid=variant.uid,
variant_name=variant.name,
variant_type=variant_type
)
if created:
add_to_changelog.append(variant_obj)
for arch in variant.arches:
arch_obj = common_models.Arch.objects.get(name=arch)
var_arch_obj, created = models.VariantArch.objects.get_or_create(arch=arch_obj, variant=variant_obj)
for i in im.images.get(variant.uid, {}).get(arch, []):
path, file_name = os.path.split(i.path)
path_id = models.Path.get_cached_id(path, create=True)
image, _ = package_models.Image.objects.get_or_create(
file_name=file_name, sha256=i.checksums["sha256"],
defaults={
'image_format_id': package_models.ImageFormat.get_cached_id(i.format),
'image_type_id': package_models.ImageType.get_cached_id(i.type),
'disc_number': i.disc_number,
'disc_count': i.disc_count,
'arch': i.arch,
'mtime': i.mtime,
'size': i.size,
'bootable': i.bootable,
'implant_md5': i.implant_md5,
'volume_id': i.volume_id,
'md5': i.checksums.get("md5", None),
'sha1': i.checksums.get("sha1", None),
}
)
mi, created = models.ComposeImage.objects.get_or_create(
variant_arch=var_arch_obj,
image=image,
path_id=path_id)
imported_images += 1
for obj in add_to_changelog:
lib._maybe_log(request, True, obj)
request.changeset.add('notice', 0, 'null',
json.dumps({
'compose': compose_obj.compose_id,
'num_linked_images': imported_images,
}))
def _find_composes_srpm_name_with_rpm_nvr(nvr):
"""
Filter composes and SRPM's name with rpm nvr
"""
try:
nvr = kobo.rpmlib.parse_nvr(nvr)
except ValueError:
raise ValueError("Invalid NVR: %s" % nvr)
q = Q()
q &= Q(variant__variantarch__composerpm__rpm__name=nvr["name"])
q &= Q(variant__variantarch__composerpm__rpm__version=nvr["version"])
q &= Q(variant__variantarch__composerpm__rpm__release=nvr["release"])
rpms = RPM.objects.filter(name=nvr["name"], version=nvr["version"], release=nvr["release"])
srpm_name = None
if rpms:
srpm_name = list(set([rpm.srpm_name for rpm in rpms.distinct()]))[0]
if srpm_name is None:
raise ValueError("not found")
return models.Compose.objects.filter(q).distinct(), srpm_name
def find_bugzilla_products_and_components_with_rpm_nvr(nvr):
"""
Filter bugzilla products and components with rpm nvr
"""
composes, srpm_name = _find_composes_srpm_name_with_rpm_nvr(nvr)
release_ids = [compose.release for compose in composes]
releases = [Release.objects.get(release_id=release_id) for release_id in release_ids]
result = []
for release in releases:
bugzilla = dict()
bugzilla['bugzilla_product'] = release.bugzilla_product
component_names = common_hacks.srpm_name_to_component_names(srpm_name)
release_components = ReleaseComponent.objects.filter(
release=release,
name__in=component_names).distinct()
bugzilla['bugzilla_component'] = [rc.bugzilla_component.export()
for rc in release_components
if rc.bugzilla_component]
if bugzilla not in result:
result.append(bugzilla)
return result
| mit |
stricaud/dionaea | modules/python/scripts/logsql.py | 8 | 33468 | #********************************************************************************
#* Dionaea
#* - catches bugs -
#*
#*
#*
#* Copyright (C) 2009 Paul Baecher & Markus Koetter
#*
#* This program is free software; you can redistribute it and/or
#* modify it under the terms of the GNU General Public License
#* as published by the Free Software Foundation; either version 2
#* of the License, or (at your option) any later version.
#*
#* This program is distributed in the hope that it will be useful,
#* but WITHOUT ANY WARRANTY; without even the implied warranty of
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#* GNU General Public License for more details.
#*
#* You should have received a copy of the GNU General Public License
#* along with this program; if not, write to the Free Software
#* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#*
#*
#* contact [email protected]
#*
#*******************************************************************************/
from dionaea.core import ihandler, incident, g_dionaea
import os
import logging
import random
import json
import sqlite3
import time
logger = logging.getLogger('logsql')
logger.setLevel(logging.DEBUG)
class logsqlhandler(ihandler):
def __init__(self, path):
logger.debug("%s ready!" % (self.__class__.__name__))
self.path = path
def start(self):
ihandler.__init__(self, self.path)
# mapping socket -> attackid
self.attacks = {}
self.pending = {}
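        # Note (added for clarity): self.attacks maps a connection object to a
        # (root_attackid, attackid) tuple, while self.pending collects, per
        # not-yet-registered parent connection, the ids of child connections that
        # were linked to it before it got its own row in the connections table.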
# self.dbh = sqlite3.connect(user = g_dionaea.config()['modules']['python']['logsql']['file'])
file = g_dionaea.config()['modules']['python']['logsql']['sqlite']['file']
self.dbh = sqlite3.connect(file)
self.cursor = self.dbh.cursor()
update = False
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
connections (
connection INTEGER PRIMARY KEY,
connection_type TEXT,
connection_transport TEXT,
connection_protocol TEXT,
connection_timestamp INTEGER,
connection_root INTEGER,
connection_parent INTEGER,
local_host TEXT,
local_port INTEGER,
remote_host TEXT,
remote_hostname TEXT,
remote_port INTEGER
)""")
self.cursor.execute("""CREATE TRIGGER IF NOT EXISTS connections_INSERT_update_connection_root_trg
AFTER INSERT ON connections
FOR EACH ROW
WHEN
new.connection_root IS NULL
BEGIN
UPDATE connections SET connection_root = connection WHERE connection = new.connection AND new.connection_root IS NULL;
END""")
for idx in ["type","timestamp","root","parent"]:
self.cursor.execute("""CREATE INDEX IF NOT EXISTS connections_%s_idx
ON connections (connection_%s)""" % (idx, idx))
for idx in ["local_host","local_port","remote_host"]:
self.cursor.execute("""CREATE INDEX IF NOT EXISTS connections_%s_idx
ON connections (%s)""" % (idx, idx))
# self.cursor.execute("""CREATE TABLE IF NOT EXISTS
# bistreams (
# bistream INTEGER PRIMARY KEY,
# connection INTEGER,
# bistream_data TEXT
# )""")
#
# self.cursor.execute("""CREATE TABLE IF NOT EXISTS
# smbs (
# smb INTEGER PRIMARY KEY,
# connection INTEGER,
# smb_direction TEXT,
# smb_action TEXT,
# CONSTRAINT smb_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
# )""")
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
dcerpcbinds (
dcerpcbind INTEGER PRIMARY KEY,
connection INTEGER,
dcerpcbind_uuid TEXT,
dcerpcbind_transfersyntax TEXT
-- CONSTRAINT dcerpcs_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
)""")
for idx in ["uuid","transfersyntax"]:
self.cursor.execute("""CREATE INDEX IF NOT EXISTS dcerpcbinds_%s_idx
ON dcerpcbinds (dcerpcbind_%s)""" % (idx, idx))
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
dcerpcrequests (
dcerpcrequest INTEGER PRIMARY KEY,
connection INTEGER,
dcerpcrequest_uuid TEXT,
dcerpcrequest_opnum INTEGER
-- CONSTRAINT dcerpcs_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
)""")
for idx in ["uuid","opnum"]:
self.cursor.execute("""CREATE INDEX IF NOT EXISTS dcerpcrequests_%s_idx
ON dcerpcrequests (dcerpcrequest_%s)""" % (idx, idx))
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
dcerpcservices (
dcerpcservice INTEGER PRIMARY KEY,
dcerpcservice_uuid TEXT,
dcerpcservice_name TEXT,
CONSTRAINT dcerpcservice_uuid_uniq UNIQUE (dcerpcservice_uuid)
)""")
from uuid import UUID
from dionaea.smb import rpcservices
import inspect
services = inspect.getmembers(rpcservices, inspect.isclass)
for name, servicecls in services:
if not name == 'RPCService' and issubclass(servicecls, rpcservices.RPCService):
try:
self.cursor.execute("INSERT INTO dcerpcservices (dcerpcservice_name, dcerpcservice_uuid) VALUES (?,?)",
(name, str(UUID(hex=servicecls.uuid))) )
except Exception as e:
# print("dcerpcservice %s existed %s " % (servicecls.uuid, e) )
pass
logger.info("Getting RPC Services")
r = self.cursor.execute("SELECT * FROM dcerpcservices")
# print(r)
names = [r.description[x][0] for x in range(len(r.description))]
r = [ dict(zip(names, i)) for i in r]
# print(r)
r = dict([(UUID(i['dcerpcservice_uuid']).hex,i['dcerpcservice']) for i in r])
# print(r)
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
dcerpcserviceops (
dcerpcserviceop INTEGER PRIMARY KEY,
dcerpcservice INTEGER,
dcerpcserviceop_opnum INTEGER,
dcerpcserviceop_name TEXT,
dcerpcserviceop_vuln TEXT,
CONSTRAINT dcerpcop_service_opnum_uniq UNIQUE (dcerpcservice, dcerpcserviceop_opnum)
)""")
logger.info("Setting RPC ServiceOps")
for name, servicecls in services:
if not name == 'RPCService' and issubclass(servicecls, rpcservices.RPCService):
for opnum in servicecls.ops:
op = servicecls.ops[opnum]
uuid = servicecls.uuid
vuln = ''
dcerpcservice = r[uuid]
if opnum in servicecls.vulns:
vuln = servicecls.vulns[opnum]
try:
self.cursor.execute("INSERT INTO dcerpcserviceops (dcerpcservice, dcerpcserviceop_opnum, dcerpcserviceop_name, dcerpcserviceop_vuln) VALUES (?,?,?,?)",
(dcerpcservice, opnum, op, vuln))
except:
# print("%s %s %s %s %s existed" % (dcerpcservice, uuid, name, op, vuln))
pass
# NetPathCompare was called NetCompare in dcerpcserviceops
try:
logger.debug("Trying to update table: dcerpcserviceops")
x = self.cursor.execute("""SELECT * FROM dcerpcserviceops WHERE dcerpcserviceop_name = 'NetCompare'""").fetchall()
if len(x) > 0:
self.cursor.execute("""UPDATE dcerpcserviceops SET dcerpcserviceop_name = 'NetPathCompare' WHERE dcerpcserviceop_name = 'NetCompare'""")
logger.debug("... done")
else:
logger.info("... not required")
except Exception as e:
print(e)
logger.info("... not required")
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
emu_profiles (
emu_profile INTEGER PRIMARY KEY,
connection INTEGER,
emu_profile_json TEXT
-- CONSTRAINT emu_profiles_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
)""")
# fix a typo on emu_services table definition
# emu_services.emu_serive is wrong, should be emu_services.emu_service
# 1) rename table, create the proper table
try:
logger.debug("Trying to update table: emu_services")
self.cursor.execute("""SELECT emu_serivce FROM emu_services LIMIT 1""")
self.cursor.execute("""ALTER TABLE emu_services RENAME TO emu_services_old""")
update = True
except Exception as e:
logger.debug("... not required")
update = False
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
emu_services (
emu_serivce INTEGER PRIMARY KEY,
connection INTEGER,
emu_service_url TEXT
-- CONSTRAINT emu_services_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
)""")
# 2) copy all values to proper table, drop old table
try:
if update == True:
self.cursor.execute("""
INSERT INTO
emu_services (emu_service, connection, emu_service_url)
SELECT
emu_serivce, connection, emu_service_url
FROM emu_services_old""")
self.cursor.execute("""DROP TABLE emu_services_old""")
logger.debug("... done")
except Exception as e:
logger.debug("Updating emu_services failed, copying old table failed (%s)" % e)
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
offers (
offer INTEGER PRIMARY KEY,
connection INTEGER,
offer_url TEXT
-- CONSTRAINT offers_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
)""")
self.cursor.execute("""CREATE INDEX IF NOT EXISTS offers_url_idx ON offers (offer_url)""")
# fix a type on downloads table definition
# downloads.downloads is wrong, should be downloads.download
# 1) rename table, create the proper table
try:
logger.debug("Trying to update table: downloads")
self.cursor.execute("""SELECT downloads FROM downloads LIMIT 1""")
self.cursor.execute("""ALTER TABLE downloads RENAME TO downloads_old""")
update = True
except Exception as e:
# print(e)
logger.debug("... not required")
update = False
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
downloads (
download INTEGER PRIMARY KEY,
connection INTEGER,
download_url TEXT,
download_md5_hash TEXT
-- CONSTRAINT downloads_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
)""")
# 2) copy all values to proper table, drop old table
try:
if update == True:
self.cursor.execute("""
INSERT INTO
downloads (download, connection, download_url, download_md5_hash)
SELECT
downloads, connection, download_url, download_md5_hash
FROM downloads_old""")
self.cursor.execute("""DROP TABLE downloads_old""")
logger.debug("... done")
        except Exception as e:
logger.debug("Updating downloads failed, copying old table failed (%s)" % e)
for idx in ["url", "md5_hash"]:
self.cursor.execute("""CREATE INDEX IF NOT EXISTS downloads_%s_idx
ON downloads (download_%s)""" % (idx, idx))
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
resolves (
resolve INTEGER PRIMARY KEY,
connection INTEGER,
resolve_hostname TEXT,
resolve_type TEXT,
resolve_result TEXT
)""")
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
p0fs (
p0f INTEGER PRIMARY KEY,
connection INTEGER,
p0f_genre TEXT,
p0f_link TEXT,
p0f_detail TEXT,
p0f_uptime INTEGER,
p0f_tos TEXT,
p0f_dist INTEGER,
p0f_nat INTEGER,
p0f_fw INTEGER
-- CONSTRAINT p0fs_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
)""")
for idx in ["genre","detail","uptime"]:
self.cursor.execute("""CREATE INDEX IF NOT EXISTS p0fs_%s_idx
ON p0fs (p0f_%s)""" % (idx, idx))
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
logins (
login INTEGER PRIMARY KEY,
connection INTEGER,
login_username TEXT,
login_password TEXT
-- CONSTRAINT logins_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
)""")
for idx in ["username","password"]:
self.cursor.execute("""CREATE INDEX IF NOT EXISTS logins_%s_idx
ON logins (login_%s)""" % (idx, idx))
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
mssql_fingerprints (
mssql_fingerprint INTEGER PRIMARY KEY,
connection INTEGER,
mssql_fingerprint_hostname TEXT,
mssql_fingerprint_appname TEXT,
mssql_fingerprint_cltintname TEXT
-- CONSTRAINT mssql_fingerprints_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
)""")
for idx in ["hostname","appname","cltintname"]:
self.cursor.execute("""CREATE INDEX IF NOT EXISTS mssql_fingerprints_%s_idx
ON mssql_fingerprints (mssql_fingerprint_%s)""" % (idx, idx))
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
mssql_commands (
mssql_command INTEGER PRIMARY KEY,
connection INTEGER,
mssql_command_status TEXT,
mssql_command_cmd TEXT
-- CONSTRAINT mssql_commands_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
)""")
for idx in ["status"]:
self.cursor.execute("""CREATE INDEX IF NOT EXISTS mssql_commands_%s_idx
ON mssql_commands (mssql_command_%s)""" % (idx, idx))
self.cursor.execute("""CREATE TABLE IF NOT EXISTS virustotals (
virustotal INTEGER PRIMARY KEY,
virustotal_md5_hash TEXT NOT NULL,
virustotal_timestamp INTEGER NOT NULL,
virustotal_permalink TEXT NOT NULL
)""")
for idx in ["md5_hash"]:
self.cursor.execute("""CREATE INDEX IF NOT EXISTS virustotals_%s_idx
ON virustotals (virustotal_%s)""" % (idx, idx))
self.cursor.execute("""CREATE TABLE IF NOT EXISTS virustotalscans (
virustotalscan INTEGER PRIMARY KEY,
virustotal INTEGER NOT NULL,
virustotalscan_scanner TEXT NOT NULL,
virustotalscan_result TEXT
)""")
for idx in ["scanner","result"]:
self.cursor.execute("""CREATE INDEX IF NOT EXISTS virustotalscans_%s_idx
ON virustotalscans (virustotalscan_%s)""" % (idx, idx))
self.cursor.execute("""CREATE INDEX IF NOT EXISTS virustotalscans_virustotal_idx
ON virustotalscans (virustotal)""")
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
mysql_commands (
mysql_command INTEGER PRIMARY KEY,
connection INTEGER,
mysql_command_cmd NUMBER NOT NULL
-- CONSTRAINT mysql_commands_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
)""")
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
mysql_command_args (
mysql_command_arg INTEGER PRIMARY KEY,
mysql_command INTEGER,
mysql_command_arg_index NUMBER NOT NULL,
mysql_command_arg_data TEXT NOT NULL
-- CONSTRAINT mysql_commands_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
)""")
for idx in ["command"]:
self.cursor.execute("""CREATE INDEX IF NOT EXISTS mysql_command_args_%s_idx
ON mysql_command_args (mysql_%s)""" % (idx, idx))
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
mysql_command_ops (
mysql_command_op INTEGER PRIMARY KEY,
mysql_command_cmd INTEGER NOT NULL,
mysql_command_op_name TEXT NOT NULL,
CONSTRAINT mysql_command_cmd_uniq UNIQUE (mysql_command_cmd)
)""")
from dionaea.mysql.include.packets import MySQL_Commands
logger.info("Setting MySQL Command Ops")
for num,name in MySQL_Commands.items():
try:
self.cursor.execute("INSERT INTO mysql_command_ops (mysql_command_cmd, mysql_command_op_name) VALUES (?,?)",
(num, name))
except:
pass
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
sip_commands (
sip_command INTEGER PRIMARY KEY,
connection INTEGER,
sip_command_method ,
sip_command_call_id ,
sip_command_user_agent ,
sip_command_allow INTEGER
-- CONSTRAINT sip_commands_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
)""")
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
sip_addrs (
sip_addr INTEGER PRIMARY KEY,
sip_command INTEGER,
sip_addr_type ,
sip_addr_display_name,
sip_addr_uri_scheme,
sip_addr_uri_user,
sip_addr_uri_password,
sip_addr_uri_host,
sip_addr_uri_port
-- CONSTRAINT sip_addrs_command_fkey FOREIGN KEY (sip_command) REFERENCES sip_commands (sip_command)
)""")
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
sip_vias (
sip_via INTEGER PRIMARY KEY,
sip_command INTEGER,
sip_via_protocol,
sip_via_address,
sip_via_port
-- CONSTRAINT sip_vias_command_fkey FOREIGN KEY (sip_command) REFERENCES sip_commands (sip_command)
)""")
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
sip_sdp_origins (
sip_sdp_origin INTEGER PRIMARY KEY,
sip_command INTEGER,
sip_sdp_origin_username,
sip_sdp_origin_sess_id,
sip_sdp_origin_sess_version,
sip_sdp_origin_nettype,
sip_sdp_origin_addrtype,
sip_sdp_origin_unicast_address
-- CONSTRAINT sip_sdp_origins_fkey FOREIGN KEY (sip_command) REFERENCES sip_commands (sip_command)
)""")
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
sip_sdp_connectiondatas (
sip_sdp_connectiondata INTEGER PRIMARY KEY,
sip_command INTEGER,
sip_sdp_connectiondata_nettype,
sip_sdp_connectiondata_addrtype,
sip_sdp_connectiondata_connection_address,
sip_sdp_connectiondata_ttl,
sip_sdp_connectiondata_number_of_addresses
-- CONSTRAINT sip_sdp_connectiondatas_fkey FOREIGN KEY (sip_command) REFERENCES sip_commands (sip_command)
)""")
self.cursor.execute("""CREATE TABLE IF NOT EXISTS
sip_sdp_medias (
sip_sdp_media INTEGER PRIMARY KEY,
sip_command INTEGER,
sip_sdp_media_media,
sip_sdp_media_port,
sip_sdp_media_number_of_ports,
sip_sdp_media_proto
-- sip_sdp_media_fmt,
-- sip_sdp_media_attributes
-- CONSTRAINT sip_sdp_medias_fkey FOREIGN KEY (sip_command) REFERENCES sip_commands (sip_command)
)""")
# self.cursor.execute("""CREATE TABLE IF NOT EXISTS
# httpheaders (
# httpheader INTEGER PRIMARY KEY,
# connection INTEGER,
# http_headerkey TEXT,
# http_headervalue TEXT,
# -- CONSTRAINT httpheaders_connection_fkey FOREIGN KEY (connection) REFERENCES connections (connection)
# )""")
#
# for idx in ["headerkey","headervalue"]:
# self.cursor.execute("""CREATE INDEX IF NOT EXISTS httpheaders_%s_idx
# ON httpheaders (httpheader_%s)""" % (idx, idx))
# connection index for all
for idx in ["dcerpcbinds", "dcerpcrequests", "emu_profiles", "emu_services", "offers", "downloads", "p0fs", "logins", "mssql_fingerprints", "mssql_commands","mysql_commands","sip_commands"]:
self.cursor.execute("""CREATE INDEX IF NOT EXISTS %s_connection_idx ON %s (connection)""" % (idx, idx))
self.dbh.commit()
# updates, database schema corrections for old versions
# svn rev 2143 removed the table dcerpcs
# and created the table dcerpcrequests
#
# copy the data to the new table dcerpcrequests
# drop the old table
try:
logger.debug("Updating Table dcerpcs")
self.cursor.execute("""INSERT INTO
dcerpcrequests (connection, dcerpcrequest_uuid, dcerpcrequest_opnum)
SELECT
connection, dcerpc_uuid, dcerpc_opnum
FROM
dcerpcs""")
self.cursor.execute("""DROP TABLE dcerpcs""")
logger.debug("... done")
except Exception as e:
# print(e)
logger.debug("... not required")
def __del__(self):
logger.info("Closing sqlite handle")
self.cursor.close()
self.cursor = None
self.dbh.close()
self.dbh = None
def handle_incident(self, icd):
# print("unknown")
pass
def connection_insert(self, icd, connection_type):
con=icd.con
r = self.cursor.execute("INSERT INTO connections (connection_timestamp, connection_type, connection_transport, connection_protocol, local_host, local_port, remote_host, remote_hostname, remote_port) VALUES (?,?,?,?,?,?,?,?,?)",
(time.time(), connection_type, con.transport, con.protocol, con.local.host, con.local.port, con.remote.host, con.remote.hostname, con.remote.port) )
attackid = self.cursor.lastrowid
self.attacks[con] = (attackid, attackid)
self.dbh.commit()
# maybe this was a early connection?
if con in self.pending:
# the connection was linked before we knew it
# that means we have to
# - update the connection_root and connection_parent for all connections which had the pending
# - update the connection_root for all connections which had the 'childid' as connection_root
for i in self.pending[con]:
print("%s %s %s" % (attackid, attackid, i))
self.cursor.execute("UPDATE connections SET connection_root = ?, connection_parent = ? WHERE connection = ?",
(attackid, attackid, i ) )
self.cursor.execute("UPDATE connections SET connection_root = ? WHERE connection_root = ?",
(attackid, i ) )
self.dbh.commit()
return attackid
def handle_incident_dionaea_connection_tcp_listen(self, icd):
attackid = self.connection_insert( icd, 'listen')
con=icd.con
logger.info("listen connection on %s:%i (id=%i)" %
(con.remote.host, con.remote.port, attackid))
def handle_incident_dionaea_connection_tls_listen(self, icd):
attackid = self.connection_insert( icd, 'listen')
con=icd.con
logger.info("listen connection on %s:%i (id=%i)" %
(con.remote.host, con.remote.port, attackid))
def handle_incident_dionaea_connection_tcp_connect(self, icd):
attackid = self.connection_insert( icd, 'connect')
con=icd.con
logger.info("connect connection to %s/%s:%i from %s:%i (id=%i)" %
(con.remote.host, con.remote.hostname, con.remote.port, con.local.host, con.local.port, attackid))
def handle_incident_dionaea_connection_tls_connect(self, icd):
attackid = self.connection_insert( icd, 'connect')
con=icd.con
logger.info("connect connection to %s/%s:%i from %s:%i (id=%i)" %
(con.remote.host, con.remote.hostname, con.remote.port, con.local.host, con.local.port, attackid))
def handle_incident_dionaea_connection_udp_connect(self, icd):
attackid = self.connection_insert( icd, 'connect')
con=icd.con
logger.info("connect connection to %s/%s:%i from %s:%i (id=%i)" %
(con.remote.host, con.remote.hostname, con.remote.port, con.local.host, con.local.port, attackid))
def handle_incident_dionaea_connection_tcp_accept(self, icd):
attackid = self.connection_insert( icd, 'accept')
con=icd.con
logger.info("accepted connection from %s:%i to %s:%i (id=%i)" %
(con.remote.host, con.remote.port, con.local.host, con.local.port, attackid))
def handle_incident_dionaea_connection_tls_accept(self, icd):
attackid = self.connection_insert( icd, 'accept')
con=icd.con
logger.info("accepted connection from %s:%i to %s:%i (id=%i)" %
(con.remote.host, con.remote.port, con.local.host, con.local.port, attackid))
def handle_incident_dionaea_connection_tcp_reject(self, icd):
attackid = self.connection_insert(icd, 'reject')
con=icd.con
logger.info("reject connection from %s:%i to %s:%i (id=%i)" %
(con.remote.host, con.remote.port, con.local.host, con.local.port, attackid))
def handle_incident_dionaea_connection_tcp_pending(self, icd):
attackid = self.connection_insert(icd, 'pending')
con=icd.con
logger.info("pending connection from %s:%i to %s:%i (id=%i)" %
(con.remote.host, con.remote.port, con.local.host, con.local.port, attackid))
def handle_incident_dionaea_connection_link_early(self, icd):
# if we have to link a connection with a connection we do not know yet,
# we store the unknown connection in self.pending and associate the childs id with it
if icd.parent not in self.attacks:
if icd.parent not in self.pending:
self.pending[icd.parent] = {self.attacks[icd.child][1]: True}
else:
if icd.child not in self.pending[icd.parent]:
self.pending[icd.parent][self.attacks[icd.child][1]] = True
def handle_incident_dionaea_connection_link(self, icd):
if icd.parent in self.attacks:
logger.info("parent ids %s" % str(self.attacks[icd.parent]))
parentroot, parentid = self.attacks[icd.parent]
if icd.child in self.attacks:
logger.info("child had ids %s" % str(self.attacks[icd.child]))
childroot, childid = self.attacks[icd.child]
else:
childid = parentid
self.attacks[icd.child] = (parentroot, childid)
logger.info("child has ids %s" % str(self.attacks[icd.child]))
logger.info("child %i parent %i root %i" % (childid, parentid, parentroot) )
r = self.cursor.execute("UPDATE connections SET connection_root = ?, connection_parent = ? WHERE connection = ?",
(parentroot, parentid, childid) )
self.dbh.commit()
if icd.child in self.pending:
# if the new accepted connection was pending
# assign the connection_root to all connections which have been waiting for this connection
parentroot, parentid = self.attacks[icd.parent]
if icd.child in self.attacks:
childroot, childid = self.attacks[icd.child]
else:
childid = parentid
self.cursor.execute("UPDATE connections SET connection_root = ? WHERE connection_root = ?",
(parentroot, childid) )
self.dbh.commit()
def handle_incident_dionaea_connection_free(self, icd):
con=icd.con
if con in self.attacks:
attackid = self.attacks[con][1]
del self.attacks[con]
logger.info("attackid %i is done" % attackid)
else:
logger.warn("no attackid for %s:%s" % (con.local.host, con.local.port) )
if con in self.pending:
del self.pending[con]
def handle_incident_dionaea_module_emu_profile(self, icd):
con = icd.con
attackid = self.attacks[con][1]
logger.info("emu profile for attackid %i" % attackid)
self.cursor.execute("INSERT INTO emu_profiles (connection, emu_profile_json) VALUES (?,?)",
(attackid, icd.profile) )
self.dbh.commit()
def handle_incident_dionaea_download_offer(self, icd):
con=icd.con
attackid = self.attacks[con][1]
logger.info("offer for attackid %i" % attackid)
self.cursor.execute("INSERT INTO offers (connection, offer_url) VALUES (?,?)",
(attackid, icd.url) )
self.dbh.commit()
def handle_incident_dionaea_download_complete_hash(self, icd):
con=icd.con
attackid = self.attacks[con][1]
logger.info("complete for attackid %i" % attackid)
self.cursor.execute("INSERT INTO downloads (connection, download_url, download_md5_hash) VALUES (?,?,?)",
(attackid, icd.url, icd.md5hash) )
self.dbh.commit()
def handle_incident_dionaea_service_shell_listen(self, icd):
con=icd.con
attackid = self.attacks[con][1]
logger.info("listen shell for attackid %i" % attackid)
self.cursor.execute("INSERT INTO emu_services (connection, emu_service_url) VALUES (?,?)",
(attackid, "bindshell://"+str(icd.port)) )
self.dbh.commit()
def handle_incident_dionaea_service_shell_connect(self, icd):
con=icd.con
attackid = self.attacks[con][1]
logger.info("connect shell for attackid %i" % attackid)
self.cursor.execute("INSERT INTO emu_services (connection, emu_service_url) VALUES (?,?)",
(attackid, "connectbackshell://"+str(icd.host)+":"+str(icd.port)) )
self.dbh.commit()
def handle_incident_dionaea_detect_attack(self, icd):
con=icd.con
attackid = self.attacks[con]
def handle_incident_dionaea_modules_python_p0f(self, icd):
con=icd.con
if con in self.attacks:
attackid = self.attacks[con][1]
self.cursor.execute("INSERT INTO p0fs (connection, p0f_genre, p0f_link, p0f_detail, p0f_uptime, p0f_tos, p0f_dist, p0f_nat, p0f_fw) VALUES (?,?,?,?,?,?,?,?,?)",
( attackid, icd.genre, icd.link, icd.detail, icd.uptime, icd.tos, icd.dist, icd.nat, icd.fw))
self.dbh.commit()
def handle_incident_dionaea_modules_python_smb_dcerpc_request(self, icd):
con=icd.con
if con in self.attacks:
attackid = self.attacks[con][1]
self.cursor.execute("INSERT INTO dcerpcrequests (connection, dcerpcrequest_uuid, dcerpcrequest_opnum) VALUES (?,?,?)",
(attackid, icd.uuid, icd.opnum))
self.dbh.commit()
def handle_incident_dionaea_modules_python_smb_dcerpc_bind(self, icd):
con=icd.con
if con in self.attacks:
attackid = self.attacks[con][1]
self.cursor.execute("INSERT INTO dcerpcbinds (connection, dcerpcbind_uuid, dcerpcbind_transfersyntax) VALUES (?,?,?)",
(attackid, icd.uuid, icd.transfersyntax))
self.dbh.commit()
def handle_incident_dionaea_modules_python_mssql_login(self, icd):
con = icd.con
if con in self.attacks:
attackid = self.attacks[con][1]
self.cursor.execute("INSERT INTO logins (connection, login_username, login_password) VALUES (?,?,?)",
(attackid, icd.username, icd.password))
self.cursor.execute("INSERT INTO mssql_fingerprints (connection, mssql_fingerprint_hostname, mssql_fingerprint_appname, mssql_fingerprint_cltintname) VALUES (?,?,?,?)",
(attackid, icd.hostname, icd.appname, icd.cltintname))
self.dbh.commit()
def handle_incident_dionaea_modules_python_mssql_cmd(self, icd):
con = icd.con
if con in self.attacks:
attackid = self.attacks[con][1]
self.cursor.execute("INSERT INTO mssql_commands (connection, mssql_command_status, mssql_command_cmd) VALUES (?,?,?)",
(attackid, icd.status, icd.cmd))
self.dbh.commit()
def handle_incident_dionaea_modules_python_virustotal_report(self, icd):
md5 = icd.md5hash
f = open(icd.path, mode='r')
j = json.load(f)
if j['result'] == 1: # file was known to virustotal
permalink = j['permalink']
date = j['report'][0]
self.cursor.execute("INSERT INTO virustotals (virustotal_md5_hash, virustotal_permalink, virustotal_timestamp) VALUES (?,?,strftime('%s',?))",
(md5, permalink, date))
self.dbh.commit()
virustotal = self.cursor.lastrowid
scans = j['report'][1]
for av in scans:
res = scans[av]
# not detected = '' -> NULL
if res == '':
res = None
self.cursor.execute("""INSERT INTO virustotalscans (virustotal, virustotalscan_scanner, virustotalscan_result) VALUES (?,?,?)""",
(virustotal, av, res))
# logger.debug("scanner {} result {}".format(av,scans[av]))
self.dbh.commit()
def handle_incident_dionaea_modules_python_mysql_login(self, icd):
con = icd.con
if con in self.attacks:
attackid = self.attacks[con][1]
self.cursor.execute("INSERT INTO logins (connection, login_username, login_password) VALUES (?,?,?)",
(attackid, icd.username, icd.password))
self.dbh.commit()
def handle_incident_dionaea_modules_python_mysql_command(self, icd):
con = icd.con
if con in self.attacks:
attackid = self.attacks[con][1]
self.cursor.execute("INSERT INTO mysql_commands (connection, mysql_command_cmd) VALUES (?,?)",
(attackid, icd.command))
cmdid = self.cursor.lastrowid
if hasattr(icd, 'args'):
args = icd.args
for i in range(len(args)):
arg = args[i]
self.cursor.execute("INSERT INTO mysql_command_args (mysql_command, mysql_command_arg_index, mysql_command_arg_data) VALUES (?,?,?)",
(cmdid, i, arg))
self.dbh.commit()
def handle_incident_dionaea_modules_python_sip_command(self, icd):
con = icd.con
if con not in self.attacks:
return
def calc_allow(a):
b={ b'UNKNOWN' :(1<<0),
'ACK' :(1<<1),
'BYE' :(1<<2),
'CANCEL' :(1<<3),
'INFO' :(1<<4),
'INVITE' :(1<<5),
'MESSAGE' :(1<<6),
'NOTIFY' :(1<<7),
'OPTIONS' :(1<<8),
'PRACK' :(1<<9),
'PUBLISH' :(1<<10),
'REFER' :(1<<11),
'REGISTER' :(1<<12),
'SUBSCRIBE' :(1<<13),
'UPDATE' :(1<<14)
}
allow=0
for i in a:
if i in b:
allow |= b[i]
else:
allow |= b[b'UNKNOWN']
return allow
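        # Illustrative note (editorial, not part of the original dionaea code):
        # calc_allow(['INVITE', 'ACK']) yields (1<<5) | (1<<1) == 34, and any
        # method missing from the table above folds into the UNKNOWN bit.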
attackid = self.attacks[con][1]
self.cursor.execute("""INSERT INTO sip_commands
(connection, sip_command_method, sip_command_call_id,
sip_command_user_agent, sip_command_allow) VALUES (?,?,?,?,?)""",
(attackid, icd.method, icd.call_id, icd.user_agent, calc_allow(icd.allow)))
cmdid = self.cursor.lastrowid
def add_addr(cmd, _type, addr):
self.cursor.execute("""INSERT INTO sip_addrs
(sip_command, sip_addr_type, sip_addr_display_name,
sip_addr_uri_scheme, sip_addr_uri_user, sip_addr_uri_password,
sip_addr_uri_host, sip_addr_uri_port) VALUES (?,?,?,?,?,?,?,?)""",
(
cmd, _type, addr['display_name'],
addr['uri']['scheme'], addr['uri']['user'], addr['uri']['password'],
addr['uri']['host'], addr['uri']['port']
))
add_addr(cmdid,'addr',icd.get('addr'))
add_addr(cmdid,'to',icd.get('to'))
add_addr(cmdid,'contact',icd.get('contact'))
for i in icd.get('from'):
add_addr(cmdid,'from',i)
def add_via(cmd, via):
self.cursor.execute("""INSERT INTO sip_vias
(sip_command, sip_via_protocol, sip_via_address, sip_via_port)
VALUES (?,?,?,?)""",
(
cmd, via['protocol'],
via['address'], via['port']
))
for i in icd.get('via'):
add_via(cmdid, i)
def add_sdp(cmd, sdp):
def add_origin(cmd, o):
self.cursor.execute("""INSERT INTO sip_sdp_origins
(sip_command, sip_sdp_origin_username,
sip_sdp_origin_sess_id, sip_sdp_origin_sess_version,
sip_sdp_origin_nettype, sip_sdp_origin_addrtype,
sip_sdp_origin_unicast_address)
VALUES (?,?,?,?,?,?,?)""",
(
cmd, o['username'],
o['sess_id'], o['sess_version'],
o['nettype'], o['addrtype'],
o['unicast_address']
))
def add_condata(cmd, c):
self.cursor.execute("""INSERT INTO sip_sdp_connectiondatas
(sip_command, sip_sdp_connectiondata_nettype,
sip_sdp_connectiondata_addrtype, sip_sdp_connectiondata_connection_address,
sip_sdp_connectiondata_ttl, sip_sdp_connectiondata_number_of_addresses)
VALUES (?,?,?,?,?,?)""",
(
cmd, c['nettype'],
c['addrtype'], c['connection_address'],
c['ttl'], c['number_of_addresses']
))
def add_media(cmd, c):
self.cursor.execute("""INSERT INTO sip_sdp_medias
(sip_command, sip_sdp_media_media,
sip_sdp_media_port, sip_sdp_media_number_of_ports,
sip_sdp_media_proto)
VALUES (?,?,?,?,?)""",
(
cmd, c['media'],
c['port'], c['number_of_ports'],
c['proto']
))
if 'o' in sdp:
add_origin(cmd, sdp['o'])
if 'c' in sdp:
add_condata(cmd, sdp['c'])
if 'm' in sdp:
for i in sdp['m']:
add_media(cmd, i)
if hasattr(icd,'sdp') and icd.sdp is not None:
add_sdp(cmdid,icd.sdp)
self.dbh.commit()
| gpl-2.0 |
lisael/pg-django | tests/regressiontests/conditional_processing/models.py | 34 | 6931 | # -*- coding:utf-8 -*-
from datetime import datetime
from django.test import TestCase
from django.utils import unittest
from django.utils.http import parse_etags, quote_etag, parse_http_date
FULL_RESPONSE = 'Test conditional get response'
LAST_MODIFIED = datetime(2007, 10, 21, 23, 21, 47)
LAST_MODIFIED_STR = 'Sun, 21 Oct 2007 23:21:47 GMT'
LAST_MODIFIED_NEWER_STR = 'Mon, 18 Oct 2010 16:56:23 GMT'
LAST_MODIFIED_INVALID_STR = 'Mon, 32 Oct 2010 16:56:23 GMT'
EXPIRED_LAST_MODIFIED_STR = 'Sat, 20 Oct 2007 23:21:47 GMT'
ETAG = 'b4246ffc4f62314ca13147c9d4f76974'
EXPIRED_ETAG = '7fae4cd4b0f81e7d2914700043aa8ed6'
class ConditionalGet(TestCase):
urls = 'regressiontests.conditional_processing.urls'
def assertFullResponse(self, response, check_last_modified=True, check_etag=True):
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, FULL_RESPONSE)
if check_last_modified:
self.assertEqual(response['Last-Modified'], LAST_MODIFIED_STR)
if check_etag:
self.assertEqual(response['ETag'], '"%s"' % ETAG)
def assertNotModified(self, response):
self.assertEqual(response.status_code, 304)
self.assertEqual(response.content, '')
def testWithoutConditions(self):
response = self.client.get('/condition/')
self.assertFullResponse(response)
def testIfModifiedSince(self):
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
response = self.client.get('/condition/')
self.assertNotModified(response)
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_NEWER_STR
response = self.client.get('/condition/')
self.assertNotModified(response)
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_INVALID_STR
response = self.client.get('/condition/')
self.assertFullResponse(response)
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
response = self.client.get('/condition/')
self.assertFullResponse(response)
def testIfNoneMatch(self):
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
response = self.client.get('/condition/')
self.assertNotModified(response)
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % EXPIRED_ETAG
response = self.client.get('/condition/')
self.assertFullResponse(response)
# Several etags in If-None-Match is a bit exotic but why not?
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s", "%s"' % (ETAG, EXPIRED_ETAG)
response = self.client.get('/condition/')
self.assertNotModified(response)
def testIfMatch(self):
self.client.defaults['HTTP_IF_MATCH'] = '"%s"' % ETAG
response = self.client.put('/condition/etag/', {'data': ''})
self.assertEqual(response.status_code, 200)
self.client.defaults['HTTP_IF_MATCH'] = '"%s"' % EXPIRED_ETAG
response = self.client.put('/condition/etag/', {'data': ''})
self.assertEqual(response.status_code, 412)
def testBothHeaders(self):
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
response = self.client.get('/condition/')
self.assertNotModified(response)
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
response = self.client.get('/condition/')
self.assertFullResponse(response)
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % EXPIRED_ETAG
response = self.client.get('/condition/')
self.assertFullResponse(response)
def testSingleCondition1(self):
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
response = self.client.get('/condition/last_modified/')
self.assertNotModified(response)
response = self.client.get('/condition/etag/')
self.assertFullResponse(response, check_last_modified=False)
def testSingleCondition2(self):
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
response = self.client.get('/condition/etag/')
self.assertNotModified(response)
response = self.client.get('/condition/last_modified/')
self.assertFullResponse(response, check_etag=False)
def testSingleCondition3(self):
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
response = self.client.get('/condition/last_modified/')
self.assertFullResponse(response, check_etag=False)
def testSingleCondition4(self):
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % EXPIRED_ETAG
response = self.client.get('/condition/etag/')
self.assertFullResponse(response, check_last_modified=False)
def testSingleCondition5(self):
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
response = self.client.get('/condition/last_modified2/')
self.assertNotModified(response)
response = self.client.get('/condition/etag2/')
self.assertFullResponse(response, check_last_modified=False)
def testSingleCondition6(self):
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
response = self.client.get('/condition/etag2/')
self.assertNotModified(response)
response = self.client.get('/condition/last_modified2/')
self.assertFullResponse(response, check_etag=False)
def testInvalidETag(self):
self.client.defaults['HTTP_IF_NONE_MATCH'] = r'"\"'
response = self.client.get('/condition/etag/')
self.assertFullResponse(response, check_last_modified=False)
class ETagProcessing(unittest.TestCase):
def testParsing(self):
etags = parse_etags(r'"", "etag", "e\"t\"ag", "e\\tag", W/"weak"')
self.assertEqual(etags, ['', 'etag', 'e"t"ag', r'e\tag', 'weak'])
def testQuoting(self):
quoted_etag = quote_etag(r'e\t"ag')
self.assertEqual(quoted_etag, r'"e\\t\"ag"')
class HttpDateProcessing(unittest.TestCase):
def testParsingRfc1123(self):
parsed = parse_http_date('Sun, 06 Nov 1994 08:49:37 GMT')
self.assertEqual(datetime.utcfromtimestamp(parsed),
datetime(1994, 11, 06, 8, 49, 37))
def testParsingRfc850(self):
parsed = parse_http_date('Sunday, 06-Nov-94 08:49:37 GMT')
self.assertEqual(datetime.utcfromtimestamp(parsed),
datetime(1994, 11, 06, 8, 49, 37))
def testParsingAsctime(self):
parsed = parse_http_date('Sun Nov 6 08:49:37 1994')
self.assertEqual(datetime.utcfromtimestamp(parsed),
datetime(1994, 11, 06, 8, 49, 37))
| bsd-3-clause |
AlexHatesUnicorns/FDTD_Solver | world_surface/helpers/generate_surface.py | 2 | 1056 | import numpy as np
def generate_surface(y_size, delta_x, grain_size):
wing_size = 12 * grain_size
noise = np.random.random(y_size + wing_size) * 2 * delta_x - delta_x
noise = noise / np.max(noise) * delta_x
    # NOTE: the original expressions used '^' (bitwise XOR in Python, not
    # exponentiation) and omitted the squared, negated numerator; rewritten
    # below as proper Gaussian kernels, which appears to be the intent.
    gauss = np.fromfunction(lambda i: np.exp(
        -((i - 3 * grain_size) ** 2) / (2.0 * grain_size ** 2)), (6 * grain_size,), dtype=int)
    gauss_small = np.fromfunction(lambda i: np.exp(
        -((i - 9) ** 2) / 8.0), (18,), dtype=int)
res = np.convolve(noise, gauss)
res = res / np.max(res)
res = np.convolve(res, gauss_small)
print(np.max(res), delta_x,
np.max(res[wing_size:y_size + wing_size] / np.max(res) * delta_x))
return res[wing_size:y_size + wing_size] / np.max(res) * delta_x
def check_surface(x, y, x_0, delta_x, surface, n_high):
if x_0 - delta_x < x < x_0 + delta_x:
if x > (x_0 + surface[y]):
return n_high
elif x < (x_0 + surface[y]):
return 1
else:
if x > x_0:
return n_high
elif x < x_0:
return 1
return 1
| mit |
shahbaz17/zamboni | sites/dev/settings_base.py | 6 | 5379 | """private_base will be populated from puppet and placed in this directory"""
import logging
import os
import dj_database_url
from mkt.settings import (CACHE_PREFIX, ES_INDEXES,
KNOWN_PROXIES, LOGGING, HOSTNAME)
from .. import splitstrip
import private_base as private
ALLOWED_HOSTS = ['.allizom.org', '.mozflare.net']
ENGAGE_ROBOTS = False
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = private.EMAIL_HOST
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = False
SESSION_COOKIE_SECURE = True
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['default']['ATOMIC_REQUESTS'] = True
DATABASES['default']['CONN_MAX_AGE'] = 5 * 60 # 5m for persistent connections.
DATABASES['slave'] = dj_database_url.parse(private.DATABASES_SLAVE_URL)
DATABASES['slave']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['slave']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave']['sa_pool_key'] = 'slave'
DATABASES['slave']['ATOMIC_REQUESTS'] = True
DATABASES['slave']['CONN_MAX_AGE'] = 5 * 60 # 5m for persistent connections.
SERVICES_DATABASE = dj_database_url.parse(private.SERVICES_DATABASE_URL)
SLAVE_DATABASES = ['slave']
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': splitstrip(private.CACHES_DEFAULT_LOCATION),
'TIMEOUT': 500,
'KEY_PREFIX': CACHE_PREFIX,
}
}
SECRET_KEY = private.SECRET_KEY
LOG_LEVEL = logging.DEBUG
# Celery
BROKER_URL = private.BROKER_URL
CELERY_ALWAYS_EAGER = False
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
NETAPP_STORAGE = private.NETAPP_STORAGE_ROOT + '/shared_storage'
GUARDED_ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/guarded-addons'
UPLOADS_PATH = NETAPP_STORAGE + '/uploads'
ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons'
WEBSITE_ICONS_PATH = UPLOADS_PATH + '/website_icons'
FEATURED_APP_BG_PATH = UPLOADS_PATH + '/featured_app_background'
FEED_COLLECTION_BG_PATH = UPLOADS_PATH + '/feed_collection_background'
FEED_SHELF_BG_PATH = UPLOADS_PATH + '/feed_shelf_background'
IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets'
REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment'
PREVIEWS_PATH = UPLOADS_PATH + '/previews'
WEBAPP_PROMO_IMG_PATH = UPLOADS_PATH + '/webapp_promo_imgs'
WEBSITE_PROMO_IMG_PATH = UPLOADS_PATH + '/website_promo_imgs'
SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed_apps'
SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed_apps_reviewer'
PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png'
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s'
LOGGING['loggers'].update({
'amqp': {'level': logging.WARNING},
'raven': {'level': logging.WARNING},
'requests': {'level': logging.WARNING},
'z.addons': {'level': logging.DEBUG},
'z.elasticsearch': {'level': logging.DEBUG},
'z.pool': {'level': logging.ERROR},
'z.task': {'level': logging.DEBUG},
'z.users': {'level': logging.DEBUG},
})
TMP_PATH = os.path.join(NETAPP_STORAGE, 'tmp')
ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/files'
SPIDERMONKEY = '/usr/bin/tracemonkey'
csp = 'csp.middleware.CSPMiddleware'
RESPONSYS_ID = private.RESPONSYS_ID
CRONJOB_LOCK_PREFIX = 'mkt-dev'
ES_DEFAULT_NUM_REPLICAS = 2
ES_HOSTS = splitstrip(private.ES_HOSTS)
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
ES_INDEXES = dict((k, '%s_dev' % v) for k, v in ES_INDEXES.items())
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
CEF_PRODUCT = STATSD_PREFIX
ES_TIMEOUT = 60
EXPOSE_VALIDATOR_TRACEBACKS = False
KNOWN_PROXIES += ['10.2.83.105',
'10.2.83.106',
'10.2.83.107',
'10.8.83.200',
'10.8.83.201',
'10.8.83.202',
'10.8.83.203',
'10.8.83.204',
'10.8.83.210',
'10.8.83.211',
'10.8.83.212',
'10.8.83.213',
'10.8.83.214',
'10.8.83.215',
'10.8.83.251',
'10.8.83.252',
'10.8.83.253',
]
NEW_FEATURES = True
CLEANCSS_BIN = 'cleancss'
LESS_BIN = 'lessc'
STYLUS_BIN = 'stylus'
UGLIFY_BIN = 'uglifyjs'
CELERYD_TASK_SOFT_TIME_LIMIT = 540
VALIDATOR_TIMEOUT = 180
LESS_PREPROCESS = True
XSENDFILE = True
ALLOW_SELF_REVIEWS = True
GOOGLE_ANALYTICS_CREDENTIALS = private.GOOGLE_ANALYTICS_CREDENTIALS
GOOGLE_API_CREDENTIALS = private.GOOGLE_API_CREDENTIALS
MONOLITH_SERVER = 'https://monolith-dev.allizom.org'
GEOIP_URL = 'https://geo-dev-marketplace.allizom.org'
AWS_ACCESS_KEY_ID = private.AWS_ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY = private.AWS_SECRET_ACCESS_KEY
AWS_STORAGE_BUCKET_NAME = private.AWS_STORAGE_BUCKET_NAME
RAISE_ON_SIGNAL_ERROR = True
API_THROTTLE = False
NEWRELIC_ENABLED_LIST = ['dev1.addons.phx1.mozilla.com',
'dev2.addons.phx1.mozilla.com']
NEWRELIC_ENABLE = HOSTNAME in NEWRELIC_ENABLED_LIST
AES_KEYS = private.AES_KEYS
TASK_USER_ID = 4757633
SERVE_TMP_PATH = False
| bsd-3-clause |
hgl888/chromium-crosswalk | tools/telemetry/telemetry/value/trace.py | 4 | 4900 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import datetime
import logging
import os
import random
import shutil
import StringIO
import sys
import tempfile
from catapult_base import cloud_storage
from telemetry.core import util
from telemetry.internal.util import file_handle
from telemetry.timeline import trace_data as trace_data_module
from telemetry import value as value_module
# Bring in tv module for transforming raw trace to html form.
util.AddDirToPythonPath(
util.GetChromiumSrcDir(), 'third_party', 'trace-viewer')
from tracing.build import trace2html # pylint:disable=import-error
class TraceValue(value_module.Value):
def __init__(self, page, trace_data, important=False, description=None):
"""A value that contains a TraceData object and knows how to
output it.
Adding TraceValues and outputting as JSON will produce a directory full of
HTML files called trace_files. Outputting as chart JSON will also produce
an index, files.html, linking to each of these files.
"""
super(TraceValue, self).__init__(
page, name='trace', units='', important=important,
description=description, tir_label=None)
self._temp_file = self._GetTempFileHandle(trace_data)
self._cloud_url = None
self._serialized_file_handle = None
def _GetTempFileHandle(self, trace_data):
if self.page:
title = self.page.display_name
else:
title = ''
content = StringIO.StringIO()
trace2html.WriteHTMLForTraceDataToFile(
[trace_data.GetEventsFor(trace_data_module.CHROME_TRACE_PART)],
title,
content)
tf = tempfile.NamedTemporaryFile(delete=False, suffix='.html')
tf.write(content.getvalue().encode('utf-8'))
tf.close()
return file_handle.FromTempFile(tf)
def __repr__(self):
if self.page:
page_name = self.page.display_name
else:
page_name = 'None'
return 'TraceValue(%s, %s)' % (page_name, self.name)
def CleanUp(self):
"""Cleans up tempfile after it is no longer needed.
A cleaned up TraceValue cannot be used for further operations. CleanUp()
may be called more than once without error.
"""
if self._temp_file is None:
return
os.remove(self._temp_file.GetAbsPath())
self._temp_file = None
def __enter__(self):
return self
def __exit__(self, _, __, ___):
self.CleanUp()
@property
def cleaned_up(self):
return self._temp_file is None
def GetBuildbotDataType(self, output_context):
return None
def GetBuildbotValue(self):
return None
def GetRepresentativeNumber(self):
return None
def GetRepresentativeString(self):
return None
@staticmethod
def GetJSONTypeName():
return 'trace'
@classmethod
def MergeLikeValuesFromSamePage(cls, values):
# TODO(eakuefner): Implement a MultiTraceValue: a Polymer-based,
    # componentized, MultiTraceViewer-backed representation of more than one
# trace.
assert len(values) > 0
return values[0]
@classmethod
def MergeLikeValuesFromDifferentPages(cls, values):
return None
def AsDict(self):
if self._temp_file is None:
raise ValueError('Tried to serialize TraceValue without tempfile.')
d = super(TraceValue, self).AsDict()
if self._serialized_file_handle:
d['file_id'] = self._serialized_file_handle.id
if self._cloud_url:
d['cloud_url'] = self._cloud_url
return d
def Serialize(self, dir_path):
if self._temp_file is None:
raise ValueError('Tried to serialize nonexistent trace.')
file_name = str(self._temp_file.id) + self._temp_file.extension
file_path = os.path.abspath(os.path.join(dir_path, file_name))
shutil.copy(self._temp_file.GetAbsPath(), file_path)
self._serialized_file_handle = file_handle.FromFilePath(file_path)
return self._serialized_file_handle
def UploadToCloud(self, bucket):
if self._temp_file is None:
raise ValueError('Tried to upload nonexistent trace to Cloud Storage.')
try:
if self._serialized_file_handle:
fh = self._serialized_file_handle
else:
fh = self._temp_file
remote_path = ('trace-file-id_%s-%s-%d%s' % (
fh.id,
datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'),
random.randint(1, 100000),
fh.extension))
self._cloud_url = cloud_storage.Insert(
bucket, remote_path, fh.GetAbsPath())
sys.stderr.write(
'View generated trace files online at %s for page %s\n' %
(self._cloud_url, self.page.url if self.page else 'unknown'))
return self._cloud_url
except cloud_storage.PermissionError as e:
logging.error('Cannot upload trace files to cloud storage due to '
' permission error: %s' % e.message)
| bsd-3-clause |
UniversalMasterEgg8679/ansible | lib/ansible/plugins/action/win_copy.py | 185 | 1153 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.plugins.action.copy import ActionModule as CopyActionModule
# Even though CopyActionModule inherits from ActionBase, we still need to
# directly inherit from ActionBase to appease the plugin loader.
class ActionModule(CopyActionModule, ActionBase):
pass
| gpl-3.0 |
antoyo/qutebrowser | qutebrowser/utils/jinja.py | 4 | 2886 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2016 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Utilities related to jinja2."""
import os
import os.path
import traceback
import jinja2
import jinja2.exceptions
from qutebrowser.utils import utils, urlutils, log
from PyQt5.QtCore import QUrl
class Loader(jinja2.BaseLoader):
"""Jinja loader which uses utils.read_file to load templates.
Attributes:
_subdir: The subdirectory to find templates in.
"""
def __init__(self, subdir):
self._subdir = subdir
def get_source(self, _env, template):
path = os.path.join(self._subdir, template)
try:
source = utils.read_file(path)
except OSError:
raise jinja2.TemplateNotFound(template)
# Currently we don't implement auto-reloading, so we always return True
# for up-to-date.
return source, path, lambda: True
def _guess_autoescape(template_name):
"""Turn auto-escape on/off based on the file type.
Based on http://jinja.pocoo.org/docs/dev/api/#autoescaping
"""
if template_name is None or '.' not in template_name:
return False
ext = template_name.rsplit('.', 1)[1]
return ext in ['html', 'htm', 'xml']
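# Illustrative behaviour of _guess_autoescape(), following directly from the
# implementation above (these lines are editorial, not part of qutebrowser):
#
#   _guess_autoescape('page.html')  -> True   (markup-like extension)
#   _guess_autoescape('data.json')  -> False  (non-markup extension)
#   _guess_autoescape('README')     -> False  (no extension at all)
#   _guess_autoescape(None)         -> False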
def resource_url(path):
"""Load images from a relative path (to qutebrowser).
Arguments:
path: The relative path to the image
"""
image = utils.resource_filename(path)
return QUrl.fromLocalFile(image).toString(QUrl.FullyEncoded)
def render(template, **kwargs):
"""Render the given template and pass the given arguments to it."""
try:
return _env.get_template(template).render(**kwargs)
except jinja2.exceptions.UndefinedError:
log.misc.exception("UndefinedError while rendering " + template)
err_path = os.path.join('html', 'undef_error.html')
err_template = utils.read_file(err_path)
tb = traceback.format_exc()
return err_template.format(pagename=template, traceback=tb)
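# Hypothetical usage sketch (editorial addition, not part of qutebrowser): the
# template name and keyword arguments below are illustrative only.
#
#   html = render('example.html', title='Hello', items=[1, 2, 3])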
_env = jinja2.Environment(loader=Loader('html'), autoescape=_guess_autoescape)
_env.globals['resource_url'] = resource_url
_env.globals['file_url'] = urlutils.file_url
| gpl-3.0 |
nzavagli/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/boto-2.38.0/boto/plugin.py | 150 | 2711 | # Copyright 2010 Google Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Implements the plugin-related API.
To define a new plugin just subclass Plugin, like this.
class AuthPlugin(Plugin):
pass
Then start creating subclasses of your new plugin.
class MyFancyAuth(AuthPlugin):
capability = ['sign', 'vmac']
The actual interface is duck typed.
"""
import glob
import imp
import os.path
class Plugin(object):
"""Base class for all plugins."""
capability = []
@classmethod
def is_capable(cls, requested_capability):
"""Returns true if the requested capability is supported by this plugin
"""
for c in requested_capability:
if c not in cls.capability:
return False
return True
def get_plugin(cls, requested_capability=None):
if not requested_capability:
requested_capability = []
result = []
for handler in cls.__subclasses__():
if handler.is_capable(requested_capability):
result.append(handler)
return result
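# Hypothetical usage sketch (editorial addition, not part of boto): looking up
# plugins by capability with the module-level get_plugin() above. The class and
# capability names are the same illustrative ones used in the module docstring.
#
#   class AuthPlugin(Plugin):
#       pass
#
#   class MyFancyAuth(AuthPlugin):
#       capability = ['sign', 'vmac']
#
#   get_plugin(AuthPlugin, ['sign'])  # -> [MyFancyAuth]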
def _import_module(filename):
(path, name) = os.path.split(filename)
(name, ext) = os.path.splitext(name)
(file, filename, data) = imp.find_module(name, [path])
try:
return imp.load_module(name, file, filename, data)
finally:
if file:
file.close()
_plugin_loaded = False
def load_plugins(config):
global _plugin_loaded
if _plugin_loaded:
return
_plugin_loaded = True
if not config.has_option('Plugin', 'plugin_directory'):
return
directory = config.get('Plugin', 'plugin_directory')
for file in glob.glob(os.path.join(directory, '*.py')):
_import_module(file)
| mit |
skomski/duktape | src/genstrings.py | 9 | 36208 | #!/usr/bin/python
#
# Generate a list of built-in strings required by Duktape code, output
# duk_strings.h (defines) and duk_strings.c (string data). Raw string
# data is also written to duk_strings.bin.
#
# These strings may be required by execution and/or compilation, or
# built-in code. Strings are included here when it benefits footprint.
# These strings are currently interned although strings needed only by
# the compiler do not strictly need to be. Strings need to be ordered
# so that reserved words are in a certain range (with strict reserved
# words grouped together).
# XXX: integrate more tightly with genbuiltins.py
# XXX: add code to indicate strings which are needed at runtime
# (may be profile dependent); then detect which strings
# genbuiltins.py needs, and finally log unused strings
# Perhaps string lists need to be added programmatically and
# may be omitted based on profile
# XXX: avoid debug-related strings in release build (same applies to
# other configuration dependent strings, like traceback data)
# XXX: better compression
# XXX: reserved word stridx's could be made to match token numbers
# directly so that a duk_stridx2token[] would not be needed
# XXX: improve per string metadata, and sort strings within constraints
# XXX: some Duktape internal strings could just reuse existing strings
import os
import sys
import optparse
import dukutil
# Prefix for defines
define_prefix = 'DUK_STRIDX_'
#
# String lists
#
# Some strings may appear in multiple lists and even in multiple roles.
#
# XXX: currently the keywords are not recorded; use them later to organize
# strings more optimally
class BuiltinString:
name = None
section_b = None
browser_like = None
es6 = None
typedarray = None
nodejs_buffer = None
custom = None
internal = None
reserved_word = None
future_reserved_word = None
future_reserved_word_strict = None
special_literal = None
class_name = None
# computed
req_8bit = None
def __init__(self):
pass
def mkstr(x,
section_b=False,
browser_like=False,
es6=False,
typedarray=False,
nodejs_buffer=False,
commonjs=False,
custom=False,
internal=False,
reserved_word=False,
future_reserved_word=False,
future_reserved_word_strict=False,
special_literal=False,
class_name=False):
"Create a string object."
# A 0xff prefix (never part of valid UTF-8) is used for internal properties.
# It is encoded as 0x00 in generated init data for technical reasons: it
# keeps lookup table elements 7 bits instead of 8 bits. The initial byte
# of a Duktape internal string is always capitalized (e.g. \x00Value) so
# that user code can use clean lowercase prefixes like "\xFFptr".
if internal:
if len(x) < 1 or not (ord(x[0]) >= ord('A') and ord(x[0]) <= ord('Z')):
raise Exception('invalid internal key: %s' % repr(x))
x = '\x00' + x
ret = BuiltinString()
ret.name = x
ret.section_b = section_b
ret.browser_like = browser_like
ret.es6 = es6
ret.typedarray = typedarray
ret.nodejs_buffer = nodejs_buffer
ret.commonjs = commonjs
ret.custom = custom
ret.internal = internal
ret.reserved_word = reserved_word
ret.future_reserved_word = future_reserved_word
ret.future_reserved_word_strict = future_reserved_word_strict
ret.special_literal = special_literal
ret.class_name = class_name
ret.req_8bit = False
if class_name:
ret.req_8bit = True
return ret
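# Illustrative note (editorial, not part of the original script): internal keys
# get a one-byte prefix so they can never collide with a valid script-visible
# string, e.g.
#
#   mkstr("Value", internal=True).name == "\x00Value"
#   mkstr("toString").name == "toString"
#
# The 0x00 byte is only a build-time placeholder; as the comment inside mkstr()
# explains, Duktape decodes it to an 0xFF prefix at runtime.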
# Standard built-in object related strings
standard_builtin_string_list = [
# internal class values
mkstr("Undefined", class_name=True), # sort of
mkstr("Null", class_name=True), # sort of
mkstr("Object", class_name=True),
mkstr("Function", class_name=True),
mkstr("Array", class_name=True),
mkstr("String", class_name=True),
mkstr("Boolean", class_name=True),
mkstr("Number", class_name=True),
mkstr("Date", class_name=True),
mkstr("RegExp", class_name=True),
mkstr("Error", class_name=True),
mkstr("Math", class_name=True),
mkstr("JSON", class_name=True),
mkstr("Arguments", class_name=True),
# built-in object names
mkstr("Object"),
mkstr("Function"),
mkstr("Array"),
mkstr("String"),
mkstr("Boolean"),
mkstr("Number"),
mkstr("Date"),
mkstr("RegExp"),
mkstr("Error"),
mkstr("EvalError"),
mkstr("RangeError"),
mkstr("ReferenceError"),
mkstr("SyntaxError"),
mkstr("TypeError"),
mkstr("URIError"),
mkstr("Math"),
mkstr("JSON"),
# Global object
mkstr("eval"),
mkstr("parseInt"),
mkstr("parseFloat"),
mkstr("isNaN"),
mkstr("isFinite"),
mkstr("decodeURI"),
mkstr("decodeURIComponent"),
mkstr("encodeURI"),
mkstr("encodeURIComponent"),
mkstr("escape", section_b=True),
mkstr("unescape", section_b=True),
mkstr("print", browser_like=True),
mkstr("alert", browser_like=True),
# Object constructor
mkstr("length"),
mkstr("prototype"),
mkstr("getPrototypeOf"),
mkstr("getOwnPropertyDescriptor"),
mkstr("getOwnPropertyNames"),
mkstr("create"),
mkstr("defineProperty"),
mkstr("defineProperties"),
mkstr("seal"),
mkstr("freeze"),
mkstr("preventExtensions"),
mkstr("isSealed"),
mkstr("isFrozen"),
mkstr("isExtensible"),
mkstr("keys"),
# Property descriptors
mkstr("value"),
mkstr("writable"),
mkstr("configurable"),
mkstr("enumerable"),
mkstr("get"),
mkstr("set"),
# Object prototype
mkstr("constructor"),
mkstr("toString"),
mkstr("toLocaleString"),
mkstr("valueOf"),
mkstr("hasOwnProperty"),
mkstr("isPrototypeOf"),
mkstr("propertyIsEnumerable"),
# Object instances
# no special properties
# Function constructor
mkstr("length"),
mkstr("prototype"),
# Function prototype
mkstr("constructor"),
mkstr("toString"),
mkstr("apply"),
mkstr("call"),
mkstr("bind"),
# Function instances
mkstr("length"),
mkstr("prototype"),
mkstr("caller"), # for bind() generated instances
mkstr("arguments"), # for bind() generated instances
# Array constructor
mkstr("length"),
mkstr("prototype"),
mkstr("isArray"),
# Array prototype
mkstr("constructor"),
mkstr("toString"),
mkstr("toLocaleString"),
mkstr("concat"),
mkstr("join"),
mkstr("pop"),
mkstr("push"),
mkstr("reverse"),
mkstr("shift"),
mkstr("slice"),
mkstr("sort"),
mkstr("splice"),
mkstr("unshift"),
mkstr("indexOf"),
mkstr("lastIndexOf"),
mkstr("every"),
mkstr("some"),
mkstr("forEach"),
mkstr("map"),
mkstr("filter"),
mkstr("reduce"),
mkstr("reduceRight"),
# Array instances
mkstr("length"),
# String constructor
mkstr("length"),
mkstr("prototype"),
mkstr("fromCharCode"),
# String prototype
mkstr("constructor"),
mkstr("toString"),
mkstr("valueOf"),
mkstr("charAt"),
mkstr("charCodeAt"),
mkstr("concat"),
mkstr("indexOf"),
mkstr("lastIndexOf"),
mkstr("localeCompare"),
mkstr("match"),
mkstr("replace"),
mkstr("search"),
mkstr("slice"),
mkstr("split"),
mkstr("substring"),
mkstr("toLowerCase"),
mkstr("toLocaleLowerCase"),
mkstr("toUpperCase"),
mkstr("toLocaleUpperCase"),
mkstr("trim"),
mkstr("substr", section_b=True),
# String instances
mkstr("length"),
# Boolean constructor
mkstr("length"),
mkstr("prototype"),
# Boolean prototype
mkstr("constructor"),
mkstr("toString"),
mkstr("valueOf"),
# Boolean instances
# no special properties
# Number constructor
mkstr("length"),
mkstr("prototype"),
mkstr("MAX_VALUE"),
mkstr("MIN_VALUE"),
mkstr("NaN"),
mkstr("NEGATIVE_INFINITY"),
mkstr("POSITIVE_INFINITY"),
# Number prototype
mkstr("constructor"),
mkstr("toString"),
mkstr("toLocaleString"),
mkstr("valueOf"),
mkstr("toFixed"),
mkstr("toExponential"),
mkstr("toPrecision"),
# Number instances
# no special properties
# Date constructor
mkstr("length"),
mkstr("prototype"),
mkstr("parse"),
mkstr("UTC"),
mkstr("now"),
# Date prototype
mkstr("constructor"),
mkstr("toString"),
mkstr("toDateString"),
mkstr("toTimeString"),
mkstr("toLocaleString"),
mkstr("toLocaleDateString"),
mkstr("toLocaleTimeString"),
mkstr("valueOf"),
mkstr("getTime"),
mkstr("getFullYear"),
mkstr("getUTCFullYear"),
mkstr("getMonth"),
mkstr("getUTCMonth"),
mkstr("getDate"),
mkstr("getUTCDate"),
mkstr("getDay"),
mkstr("getUTCDay"),
mkstr("getHours"),
mkstr("getUTCHours"),
mkstr("getMinutes"),
mkstr("getUTCMinutes"),
mkstr("getSeconds"),
mkstr("getUTCSeconds"),
mkstr("getMilliseconds"),
mkstr("getUTCMilliseconds"),
mkstr("getTimezoneOffset"),
mkstr("setTime"),
mkstr("setMilliseconds"),
mkstr("setUTCMilliseconds"),
mkstr("setSeconds"),
mkstr("setUTCSeconds"),
mkstr("setMinutes"),
mkstr("setUTCMinutes"),
mkstr("setHours"),
mkstr("setUTCHours"),
mkstr("setDate"),
mkstr("setUTCDate"),
mkstr("setMonth"),
mkstr("setUTCMonth"),
mkstr("setFullYear"),
mkstr("setUTCFullYear"),
mkstr("toUTCString"),
mkstr("toISOString"),
mkstr("toJSON"),
mkstr("getYear", section_b=True),
mkstr("setYear", section_b=True),
mkstr("toGMTString", section_b=True),
# Date instances
# no special properties
# RegExp constructor
mkstr("length"),
mkstr("prototype"),
# RegExp prototype
mkstr("constructor"),
mkstr("exec"),
mkstr("test"),
mkstr("toString"),
# RegExp instances
mkstr("source"),
mkstr("global"),
mkstr("ignoreCase"),
mkstr("multiline"),
mkstr("lastIndex"),
mkstr("(?:)"),
# RegExp exec() results
mkstr("index"),
mkstr("input"),
# Error constructor
mkstr("length"),
mkstr("prototype"),
# Error prototype
mkstr("constructor"),
mkstr("name"),
mkstr("message"),
mkstr("toString"),
# Error instances
# no special properties
# Error prototype / error fields (apply to all native errors in the spec)
mkstr("name"),
mkstr("message"),
# Math object
mkstr("E"),
mkstr("LN10"),
mkstr("LN2"),
mkstr("LOG2E"),
mkstr("LOG10E"),
mkstr("PI"),
mkstr("SQRT1_2"),
mkstr("SQRT2"),
mkstr("abs"),
mkstr("acos"),
mkstr("asin"),
mkstr("atan"),
mkstr("atan2"),
mkstr("ceil"),
mkstr("cos"),
mkstr("exp"),
mkstr("floor"),
mkstr("log"),
mkstr("max"),
mkstr("min"),
mkstr("pow"),
mkstr("random"),
mkstr("round"),
mkstr("sin"),
mkstr("sqrt"),
mkstr("tan"),
# JSON object
mkstr("parse"),
mkstr("stringify"),
]
# Other standard related strings
standard_other_string_list = [
# typeof - these produce unfortunate naming conflicts like "Object" vs "object"
mkstr("undefined"),
mkstr("boolean"),
mkstr("number"),
mkstr("string"),
mkstr("object"), # also returned for typeof null
mkstr("function"),
# type related
mkstr("undefined"),
mkstr("null"),
mkstr("true"),
mkstr("false"),
# special values
mkstr("length"),
mkstr("NaN"),
mkstr("Infinity"),
mkstr("+Infinity"),
mkstr("-Infinity"),
mkstr("0"),
mkstr("+0"),
mkstr("-0"),
mkstr("", class_name=True), # used as a class name for unused/invalid class
mkstr(","), # for array joining
mkstr(" "), # for print()
mkstr("\n\t"), # for tracebacks
mkstr("[...]"), # for tracebacks
mkstr("Invalid Date"), # for invalid Date instances
# arguments object (E5 Section 10.6)
mkstr("arguments"),
mkstr("callee"),
mkstr("caller"),
# "set" and "get" are strings we need in object literals but they are not
# ReservedWords.
mkstr("get"),
mkstr("set"),
]
# ES6 specific strings
es6_string_list = [
mkstr("Proxy", es6=True),
#mkstr("revocable", es6=True),
# Proxy trap names
mkstr("has", es6=True),
mkstr("set", es6=True),
mkstr("get", es6=True),
mkstr("deleteProperty", es6=True),
mkstr("enumerate", es6=True),
mkstr("ownKeys", es6=True),
mkstr("setPrototypeOf", es6=True),
mkstr("__proto__", es6=True),
]
# CommonJS related strings
commonjs_string_list = [
mkstr("require", commonjs=True),
mkstr("id", commonjs=True),
mkstr("exports", commonjs=True),
]
# Node.js Buffer / TypedArray related strings
buffer_string_list = [
# Node.js class
mkstr("Buffer", class_name=True, nodejs_buffer=True),
# Node.js Buffer constructor
mkstr("concat", nodejs_buffer=True),
mkstr("isEncoding", nodejs_buffer=True),
mkstr("isBuffer", nodejs_buffer=True),
mkstr("byteLength", nodejs_buffer=True),
mkstr("compare", nodejs_buffer=True),
# Node.js Buffer prototype
mkstr("toString", nodejs_buffer=True),
mkstr("toJSON", nodejs_buffer=True),
mkstr("write", nodejs_buffer=True),
mkstr("fill", nodejs_buffer=True),
mkstr("equals", nodejs_buffer=True),
mkstr("compare", nodejs_buffer=True),
mkstr("copy", nodejs_buffer=True),
mkstr("slice", nodejs_buffer=True),
mkstr("readUInt8", nodejs_buffer=True),
mkstr("readInt8", nodejs_buffer=True),
mkstr("readUInt16LE", nodejs_buffer=True),
mkstr("readUInt16BE", nodejs_buffer=True),
mkstr("readInt16LE", nodejs_buffer=True),
mkstr("readInt16BE", nodejs_buffer=True),
mkstr("readUInt32LE", nodejs_buffer=True),
mkstr("readUInt32BE", nodejs_buffer=True),
mkstr("readInt32LE", nodejs_buffer=True),
mkstr("readInt32BE", nodejs_buffer=True),
mkstr("readFloatLE", nodejs_buffer=True),
mkstr("readFloatBE", nodejs_buffer=True),
mkstr("readDoubleLE", nodejs_buffer=True),
mkstr("readDoubleBE", nodejs_buffer=True),
mkstr("readUIntLE", nodejs_buffer=True),
mkstr("readUIntBE", nodejs_buffer=True),
mkstr("readIntLE", nodejs_buffer=True),
mkstr("readIntBE", nodejs_buffer=True),
mkstr("writeUInt8", nodejs_buffer=True),
mkstr("writeInt8", nodejs_buffer=True),
mkstr("writeUInt16LE", nodejs_buffer=True),
mkstr("writeUInt16BE", nodejs_buffer=True),
mkstr("writeInt16LE", nodejs_buffer=True),
mkstr("writeInt16BE", nodejs_buffer=True),
mkstr("writeUInt32LE", nodejs_buffer=True),
mkstr("writeUInt32BE", nodejs_buffer=True),
mkstr("writeInt32LE", nodejs_buffer=True),
mkstr("writeInt32BE", nodejs_buffer=True),
mkstr("writeFloatLE", nodejs_buffer=True),
mkstr("writeFloatBE", nodejs_buffer=True),
mkstr("writeDoubleLE", nodejs_buffer=True),
mkstr("writeDoubleBE", nodejs_buffer=True),
mkstr("writeUIntLE", nodejs_buffer=True),
mkstr("writeUIntBE", nodejs_buffer=True),
mkstr("writeIntLE", nodejs_buffer=True),
mkstr("writeIntBE", nodejs_buffer=True),
# Node.js toJSON()
mkstr("type", nodejs_buffer=True),
mkstr("data", nodejs_buffer=True),
# TypedArray classes
mkstr("ArrayBuffer", class_name=True, typedarray=True),
mkstr("DataView", class_name=True, typedarray=True),
mkstr("Int8Array", class_name=True, typedarray=True),
mkstr("Uint8Array", class_name=True, typedarray=True),
mkstr("Uint8ClampedArray", class_name=True, typedarray=True),
mkstr("Int16Array", class_name=True, typedarray=True),
mkstr("Uint16Array", class_name=True, typedarray=True),
mkstr("Int32Array", class_name=True, typedarray=True),
mkstr("Uint32Array", class_name=True, typedarray=True),
mkstr("Float32Array", class_name=True, typedarray=True),
mkstr("Float64Array", class_name=True, typedarray=True),
# TypedArray ArrayBuffer constructor
mkstr("isView", typedarray=True),
# TypedArray ArrayBuffer instance
mkstr("slice", typedarray=True),
# TypedArray ArrayBufferView shared
mkstr("buffer", typedarray=True),
mkstr("length", typedarray=True),
mkstr("byteLength", typedarray=True),
mkstr("byteOffset", typedarray=True),
mkstr("BYTES_PER_ELEMENT", typedarray=True),
# TypedArray TypedArray (e.g. Uint8Array)
mkstr("set", typedarray=True),
mkstr("subarray", typedarray=True),
# TypedArray DataView
mkstr("getInt8", typedarray=True),
mkstr("getUint8", typedarray=True),
mkstr("getInt16", typedarray=True),
mkstr("getUint16", typedarray=True),
mkstr("getInt32", typedarray=True),
mkstr("getUint32", typedarray=True),
mkstr("getFloat32", typedarray=True),
mkstr("getFloat64", typedarray=True),
mkstr("setInt8", typedarray=True),
mkstr("setUint8", typedarray=True),
mkstr("setInt16", typedarray=True),
mkstr("setUint16", typedarray=True),
mkstr("setInt32", typedarray=True),
mkstr("setUint32", typedarray=True),
mkstr("setFloat32", typedarray=True),
mkstr("setFloat64", typedarray=True),
]
# Duktape specific strings
duk_string_list = [
# non-standard global properties
mkstr("Duktape", custom=True),
# non-standard class values
mkstr("global", custom=True, class_name=True), # implementation specific but shared by e.g. smjs and V8
mkstr("ObjEnv", custom=True, class_name=True),
mkstr("DecEnv", custom=True, class_name=True),
mkstr("Buffer", custom=True, class_name=True),
mkstr("Pointer", custom=True, class_name=True),
mkstr("Thread", custom=True, class_name=True),
mkstr("Logger", custom=True, class_name=True),
# non-standard built-in object names
mkstr("ThrowTypeError", custom=True), # implementation specific, matches V8
# non-standard error object (or Error.prototype) properties
mkstr("stack", custom=True),
mkstr("pc", custom=True),
mkstr("fileName", custom=True),
mkstr("lineNumber", custom=True),
#mkstr("code", custom=True),
mkstr("Tracedata", internal=True, custom=True),
# non-standard function instance properties
mkstr("name", custom=True), # function declaration/expression name (or empty)
mkstr("fileName", custom=True), # filename associated with function (shown in tracebacks)
# typeof - these produce unfortunate naming conflicts like "Object" vs "object"
mkstr("buffer", custom=True),
mkstr("pointer", custom=True),
# internal property for primitive value (Boolean, Number, String)
mkstr("Value", internal=True, custom=True),
# internal properties for enumerator objects
mkstr("Target", internal=True, custom=True),
mkstr("Next", internal=True, custom=True),
# internal properties for RegExp instances
mkstr("Bytecode", internal=True, custom=True),
# internal properties for function objects
mkstr("Formals", internal=True, custom=True),
mkstr("Varmap", internal=True, custom=True),
mkstr("Lexenv", internal=True, custom=True),
mkstr("Varenv", internal=True, custom=True),
mkstr("Source", internal=True, custom=True),
mkstr("Pc2line", internal=True, custom=True),
# internal properties for thread objects
# internal properties for bound function objects
mkstr("Target", internal=True, custom=True), # [[TargetFunction]]
mkstr("This", internal=True, custom=True), # [[BoundThis]]
mkstr("Args", internal=True, custom=True), # [[BoundArguments]]
# internal properties for argument objects
mkstr("Map", internal=True, custom=True),
mkstr("Callee", internal=True, custom=True),
# internal properties for general objects
#mkstr("Metatable", internal=True, custom=True),
mkstr("Finalizer", internal=True, custom=True),
# internal properties for Proxy objects
mkstr("Target", internal=True, custom=True), # [[ProxyTarget]]
mkstr("Handler", internal=True, custom=True), # [[ProxyHandler]]
# internal properties for declarative environment records
mkstr("Callee", internal=True, custom=True), # to access varmap
mkstr("Thread", internal=True, custom=True), # to identify valstack
mkstr("Regbase", internal=True, custom=True), # to determine absolute valstack index
# internal properties for object environment records
mkstr("Target", internal=True, custom=True), # target object
mkstr("This", internal=True, custom=True), # implicit this binding value
# fake filename for compiled functions
mkstr("compile", custom=True), # used as a filename for functions created with Function constructor
mkstr("input", custom=True), # used as a filename for eval temp function
# Duktape object
mkstr("errCreate", custom=True),
mkstr("errThrow", custom=True),
mkstr("modSearch", custom=True),
mkstr("modLoaded", custom=True),
mkstr("env", custom=True),
mkstr("version", custom=True),
mkstr("info", custom=True),
mkstr("act", custom=True),
mkstr("gc", custom=True),
mkstr("fin", custom=True),
mkstr("enc", custom=True),
mkstr("dec", custom=True),
mkstr("hex", custom=True), # enc/dec alg
mkstr("base64", custom=True), # enc/dec alg
mkstr("jx", custom=True), # enc/dec alg
mkstr("jc", custom=True), # enc/dec alg
mkstr("compact", custom=True),
# Buffer constructor
# Buffer prototype
# Pointer constructor
# Pointer prototype
# Thread constructor
mkstr("yield", custom=True),
mkstr("resume", custom=True),
mkstr("current", custom=True),
# Thread prototype
# Logger constructor
# Logger prototype and logger instances
mkstr("fmt", custom=True),
mkstr("raw", custom=True),
mkstr("trace", custom=True),
mkstr("debug", custom=True),
mkstr("info", custom=True),
mkstr("warn", custom=True),
mkstr("error", custom=True),
mkstr("fatal", custom=True),
mkstr("n", custom=True),
mkstr("l", custom=True),
# Auxiliary logger strings
mkstr("clog", custom=True), # C logger
# for controlling log formatting of objects
mkstr("toLogString", custom=True),
# special literals for custom json encodings
mkstr('{"_undef":true}', custom=True),
mkstr('{"_nan":true}', custom=True),
mkstr('{"_inf":true}', custom=True),
mkstr('{"_ninf":true}', custom=True),
mkstr('{"_func":true}', custom=True),
mkstr('{_func:true}', custom=True),
]
# Standard reserved words (non-strict mode + strict mode)
# Note: order must match DUK_TOK_XXX reserved defines in duk_types.h
standard_reserved_words_list = [
# E5 Section 7.6.1
# Keyword
mkstr("break", reserved_word=True),
mkstr("case", reserved_word=True),
mkstr("catch", reserved_word=True),
mkstr("continue", reserved_word=True),
mkstr("debugger", reserved_word=True),
mkstr("default", reserved_word=True),
mkstr("delete", reserved_word=True),
mkstr("do", reserved_word=True),
mkstr("else", reserved_word=True),
mkstr("finally", reserved_word=True),
mkstr("for", reserved_word=True),
mkstr("function", reserved_word=True),
mkstr("if", reserved_word=True),
mkstr("in", reserved_word=True),
mkstr("instanceof", reserved_word=True),
mkstr("new", reserved_word=True),
mkstr("return", reserved_word=True),
mkstr("switch", reserved_word=True),
mkstr("this", reserved_word=True),
mkstr("throw", reserved_word=True),
mkstr("try", reserved_word=True),
mkstr("typeof", reserved_word=True),
mkstr("var", reserved_word=True),
mkstr("void", reserved_word=True),
mkstr("while", reserved_word=True),
mkstr("with", reserved_word=True),
# Future reserved word
mkstr("class", reserved_word=True, future_reserved_word=True),
mkstr("const", reserved_word=True, future_reserved_word=True),
mkstr("enum", reserved_word=True, future_reserved_word=True),
mkstr("export", reserved_word=True, future_reserved_word=True),
mkstr("extends", reserved_word=True, future_reserved_word=True),
mkstr("import", reserved_word=True, future_reserved_word=True),
mkstr("super", reserved_word=True, future_reserved_word=True),
# E5 Section 7.8.1 and 7.8.2: special literals which the lexer
# basically treats like keywords
mkstr("null", special_literal=True),
mkstr("true", special_literal=True),
mkstr("false", special_literal=True),
# "set" and "get" are *NOT* reserved words and there is even code
# in the wild with statements like 'var set = 1;'. They are thus
# treated as ordinary identifiers and recognized by the compiler
# as tokens in a special way.
#mkstr("get"),
#mkstr("set"),
]
# Standard reserved words (strict mode only)
# Note: order must match DUK_TOK_XXX reserved defines in duk_types.h
standard_reserved_words_strict_string_list = [
# Future reserved word (additionally in strict mode)
mkstr("implements", reserved_word=True, future_reserved_word_strict=True),
mkstr("interface", reserved_word=True, future_reserved_word_strict=True),
mkstr("let", reserved_word=True, future_reserved_word_strict=True),
mkstr("package", reserved_word=True, future_reserved_word_strict=True),
mkstr("private", reserved_word=True, future_reserved_word_strict=True),
mkstr("protected", reserved_word=True, future_reserved_word_strict=True),
mkstr("public", reserved_word=True, future_reserved_word_strict=True),
mkstr("static", reserved_word=True, future_reserved_word_strict=True),
mkstr("yield", reserved_word=True, future_reserved_word_strict=True),
]
#
# Forced define names for specific strings for which automatic name generation
# does a bad job.
#
special_define_names = {
# typeof has name conflicts like "object" and "Object", broken with
# these unfortunately hacky defines
'undefined': 'LC_UNDEFINED',
'Undefined': 'UC_UNDEFINED',
'null': 'LC_NULL',
'Null': 'UC_NULL',
'object': 'LC_OBJECT',
'Object': 'UC_OBJECT',
'boolean': 'LC_BOOLEAN',
'Boolean': 'UC_BOOLEAN',
'number': 'LC_NUMBER',
'Number': 'UC_NUMBER',
'function': 'LC_FUNCTION',
'Function': 'UC_FUNCTION',
'string': 'LC_STRING',
'String': 'UC_STRING',
'arguments': 'LC_ARGUMENTS',
'Arguments': 'UC_ARGUMENTS',
'buffer': 'LC_BUFFER',
'Buffer': 'UC_BUFFER',
'pointer': 'LC_POINTER',
'Pointer': 'UC_POINTER',
#'thread': 'LC_THREAD',
'Thread': 'UC_THREAD',
#'logger': 'LC_LOGGER',
'Logger': 'UC_LOGGER',
'n': 'LC_N',
'l': 'LC_L',
'error': 'LC_ERROR',
'Error': 'UC_ERROR',
# log levels
'trace': 'LC_TRACE',
#'Trace': 'UC_TRACE',
'debug': 'LC_DEBUG',
#'Debug': 'UC_DEBUG',
'info': 'LC_INFO',
#'Info': 'UC_INFO',
'warn': 'LC_WARN',
#'Warn': 'UC_WARN',
#'error': 'LC_ERROR', # already above
#'Error': 'UC_ERROR',
'fatal': 'LC_FATAL',
#'Fatal': 'UC_FATAL',
'+Infinity': 'PLUS_INFINITY',
'-Infinity': 'MINUS_INFINITY',
'0': 'ZERO',
'+0': 'PLUS_ZERO',
'-0': 'MINUS_ZERO',
'NaN': 'NAN',
'isNaN': 'IS_NAN',
'MIN_VALUE': 'MIN_VALUE',
'MAX_VALUE': 'MAX_VALUE',
'NEGATIVE_INFINITY': 'NEGATIVE_INFINITY',
'POSITIVE_INFINITY': 'POSITIVE_INFINITY',
'(?:)': 'ESCAPED_EMPTY_REGEXP',
'Invalid Date': 'INVALID_DATE',
'decodeURIComponent': 'DECODE_URI_COMPONENT',
'encodeURIComponent': 'ENCODE_URI_COMPONENT',
'getUTCDate': 'GET_UTC_DATE',
'getUTCDay': 'GET_UTC_DAY',
'getUTCFullYear': 'GET_UTC_FULL_YEAR',
'getUTCHours': 'GET_UTC_HOURS',
'getUTCMilliseconds': 'GET_UTC_MILLISECONDS',
'getUTCMinutes': 'GET_UTC_MINUTES',
'getUTCMonth': 'GET_UTC_MONTH',
'getUTCSeconds': 'GET_UTC_SECONDS',
'setUTCDate': 'SET_UTC_DATE',
'setUTCDay': 'SET_UTC_DAY',
'setUTCFullYear': 'SET_UTC_FULL_YEAR',
'setUTCHours': 'SET_UTC_HOURS',
'setUTCMilliseconds': 'SET_UTC_MILLISECONDS',
'setUTCMinutes': 'SET_UTC_MINUTES',
'setUTCMonth': 'SET_UTC_MONTH',
'setUTCSeconds': 'SET_UTC_SECONDS',
'LOG10E': 'LOG10E',
'LOG2E': 'LOG2E',
'toISOString': 'TO_ISO_STRING',
'toUTCString': 'TO_UTC_STRING',
'toGMTString': 'TO_GMT_STRING',
'URIError': 'URI_ERROR',
'Duktape': 'DUKTAPE',
'': 'EMPTY_STRING',
',': 'COMMA',
' ': 'SPACE',
'\n\t': 'NEWLINE_TAB',
'[...]': 'BRACKETED_ELLIPSIS',
'{"_undef":true}': 'JSON_EXT_UNDEFINED',
'{"_nan":true}': 'JSON_EXT_NAN',
'{"_inf":true}': 'JSON_EXT_POSINF',
'{"_ninf":true}': 'JSON_EXT_NEGINF',
'{"_func":true}': 'JSON_EXT_FUNCTION1',
'{_func:true}': 'JSON_EXT_FUNCTION2',
'BYTES_PER_ELEMENT': 'BYTES_PER_ELEMENT',
}
#
# String table generation
#
# Get a define name for a string
def get_define_name(x):
x = x.name
if special_define_names.has_key(x):
return define_prefix + special_define_names[x]
is_internal = False
if len(x) >= 1 and x[0] == '\x00':
is_internal = True
x = x[1:]
res = ''
if is_internal:
res += 'INT_'
prev_upper = False
for idx, c in enumerate(x):
if c.isupper():
if (idx > 0 and not prev_upper):
res += '_'
res += c.upper()
prev_upper = c.isupper()
return define_prefix + res
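# Illustrative examples (editorial, not part of the original script) of the
# name-to-define mapping implemented above. The first two follow the automatic
# camel-case split; "NaN" comes from the special_define_names table instead:
#
#   get_define_name(mkstr("toString"))              -> "DUK_STRIDX_TO_STRING"
#   get_define_name(mkstr("Value", internal=True))  -> "DUK_STRIDX_INT_VALUE"
#   get_define_name(mkstr("NaN"))                   -> "DUK_STRIDX_NAN"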
def gen_strings_data_bitpacked(strlist):
be = dukutil.BitEncoder()
# Strings are encoded as follows: a string begins in lowercase
# mode and recognizes the following 5-bit symbols:
#
# 0-25 'a' ... 'z'
# 26 '_'
# 27 0x00 (actually decoded to 0xff, internal marker)
# 28 reserved
# 29 switch to uppercase for one character
# (next 5-bit symbol must be in range 0-25)
# 30 switch to uppercase
# 31 read a 7-bit character verbatim
#
# Uppercase mode is the same except codes 29 and 30 switch to
# lowercase.
UNDERSCORE = 26
ZERO = 27
SWITCH1 = 29
SWITCH = 30
SEVENBIT = 31
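	# Worked example (editorial, not part of the original script): the string
	# "Map" is emitted as the 5-bit length 3, then SWITCH1 (29) followed by 12
	# to encode 'M' as a one-character uppercase escape, then 0 ('a') and
	# 15 ('p') back in lowercase mode.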
maxlen = 0
n_optimal = 0
n_switch1 = 0
n_switch = 0
n_sevenbit = 0
for s, d in strlist:
be.bits(len(s), 5)
if len(s) > maxlen:
maxlen = len(s)
# 5-bit character, mode specific
mode = 'lowercase'
for idx, c in enumerate(s):
# This encoder is not that optimal, but good enough for now.
islower = (ord(c) >= ord('a') and ord(c) <= ord('z'))
isupper = (ord(c) >= ord('A') and ord(c) <= ord('Z'))
islast = (idx == len(s) - 1)
isnextlower = False
isnextupper = False
if not islast:
c2 = s[idx+1]
isnextlower = (ord(c2) >= ord('a') and ord(c2) <= ord('z'))
isnextupper = (ord(c2) >= ord('A') and ord(c2) <= ord('Z'))
if c == '_':
be.bits(UNDERSCORE, 5)
n_optimal += 1
elif c == '\x00':
be.bits(ZERO, 5)
n_optimal += 1
elif islower and mode == 'lowercase':
be.bits(ord(c) - ord('a'), 5)
n_optimal += 1
elif isupper and mode == 'uppercase':
be.bits(ord(c) - ord('A'), 5)
n_optimal += 1
elif islower and mode == 'uppercase':
if isnextlower:
be.bits(SWITCH, 5)
be.bits(ord(c) - ord('a'), 5)
mode = 'lowercase'
n_switch += 1
else:
be.bits(SWITCH1, 5)
be.bits(ord(c) - ord('a'), 5)
n_switch1 += 1
elif isupper and mode == 'lowercase':
if isnextupper:
be.bits(SWITCH, 5)
be.bits(ord(c) - ord('A'), 5)
mode = 'uppercase'
n_switch += 1
else:
be.bits(SWITCH1, 5)
be.bits(ord(c) - ord('A'), 5)
n_switch1 += 1
else:
assert(ord(c) >= 0 and ord(c) <= 127)
be.bits(SEVENBIT, 5)
be.bits(ord(c), 7)
n_sevenbit += 1
#print 'sevenbit for: %r' % c
# end marker not necessary, C code knows length from define
res = be.getByteString()
print ('%d strings, %d bytes of string init data, %d maximum string length, ' + \
'encoding: optimal=%d,switch1=%d,switch=%d,sevenbit=%d') % \
(len(strlist), len(res), maxlen, \
n_optimal, n_switch1, n_switch, n_sevenbit)
return res, maxlen
def gen_string_list():
# Strings are ordered in the result as follows:
# 1. Strings not in either of the following two categories
# 2. Reserved words in strict mode only
# 3. Reserved words in both non-strict and strict mode
#
# Reserved words must follow an exact order because they are
# translated to/from token numbers by addition/subtraction.
# The remaining strings (in category 1) must be ordered so
# that the strings requiring an 8-bit index are at the
# beginning.
#
# XXX: quite hacky, rework.
strlist = []
num_nonstrict_reserved = None
num_strict_reserved = None
num_all_reserved = None
idx_start_reserved = None
idx_start_strict_reserved = None
def _add(x, append):
n_str = x.name
n_def = get_define_name(x)
for o_str, o_def in strlist:
if o_str == n_str and o_def == n_def:
# same string, same define => no action
return
if o_str == n_str and o_def != n_def:
# same string, different define => should not happen
raise Exception('same string, different define for %s' % n_str)
if o_str != n_str and o_def == n_def:
# different string, same define => need custom defines
raise Exception('different string, same define for %s' % n_str)
# all ok, add
if append:
strlist.append((n_str, n_def))
else:
strlist.insert(0, (n_str, n_def))
# Add reserved words in order of occurrence first. The order matters
# because the string indices must be convertible to token numbers by
# addition/subtraction.
for i in standard_reserved_words_list:
_add(i, True)
num_nonstrict_reserved = len(strlist)
for i in standard_reserved_words_strict_string_list:
_add(i, True)
num_all_reserved = len(strlist)
num_strict_reserved = num_all_reserved - num_nonstrict_reserved
# Figure out, for the remaining strings, which strings need to be
# in the 8-bit range. Note that a certain string may appear multiple
# times in different roles (e.g. as a class name and a built-in object
# name) so check every occurrence.
req_8bit = {}
str_lists = [ standard_builtin_string_list,
standard_other_string_list,
es6_string_list,
commonjs_string_list,
buffer_string_list,
duk_string_list ]
for lst in str_lists:
for i in lst:
if i.req_8bit:
req_8bit[i.name] = True
# Prepend strings not requiring 8-bit indices first; then prepend
# strings requiring 8-bit indices (as early as possible).
for lst in str_lists:
for i in lst:
if req_8bit.has_key(i.name):
continue
_add(i, False)
for lst in str_lists:
for i in lst:
_add(i, False)
# Check that 8-bit string constraints are satisfied
for i,v in enumerate(strlist):
name, defname = v[0], v[1]
if req_8bit.has_key(name):
if i >= 256:
raise Exception('8-bit string index not satisfied: ' + repr(v))
#for i,v in enumerate(strlist):
# print(i,v)
idx_start_reserved = len(strlist) - num_all_reserved
idx_start_strict_reserved = len(strlist) - num_strict_reserved
return strlist, idx_start_reserved, idx_start_strict_reserved
class GenStrings:
strlist = None # list of (name, define) pairs
strdata = None # bit packed initializer data
idx_start_reserved = None # start of reserved keywords
idx_start_strict_reserved = None # start of strict reserved keywords
maxlen = None # length of longest string
string_to_index = None # map of name -> index
define_to_index = None # map of define name -> index
def __init__(self):
pass
def processStrings(self):
self.strlist, self.idx_start_reserved, self.idx_start_strict_reserved = gen_string_list()
self.strdata, self.maxlen = gen_strings_data_bitpacked(self.strlist)
# initialize lookup maps
self.string_to_index = {}
self.define_to_index = {}
idx = 0
for s, d in self.strlist:
self.string_to_index[s] = idx
self.define_to_index[d] = idx
idx += 1
def stringToIndex(self, x):
return self.string_to_index[x]
def defineToIndex(self, x):
return self.define_to_index[x]
def hasString(self, x):
return self.string_to_index.has_key(x)
def hasDefine(self, x):
return self.define_to_index.has_key(x)
def emitStringsData(self, genc):
genc.emitArray(self.strdata, 'duk_strings_data', visibility='DUK_INTERNAL', typename='duk_uint8_t', intvalues=True, const=True, size=len(self.strdata))
genc.emitLine('')
genc.emitLine('/* to convert a heap stridx to a token number, subtract')
genc.emitLine(' * DUK_STRIDX_START_RESERVED and add DUK_TOK_START_RESERVED.')
genc.emitLine(' */')
def emitStringsHeader(self, genc):
genc.emitLine('#if !defined(DUK_SINGLE_FILE)')
genc.emitLine('DUK_INTERNAL_DECL const duk_uint8_t duk_strings_data[%d];' % len(self.strdata))
genc.emitLine('#endif /* !DUK_SINGLE_FILE */')
genc.emitLine('')
genc.emitDefine('DUK_STRDATA_DATA_LENGTH', len(self.strdata))
genc.emitDefine('DUK_STRDATA_MAX_STRLEN', self.maxlen)
genc.emitLine('')
idx = 0
for s, d in self.strlist:
genc.emitDefine(d, idx, repr(s))
idx += 1
genc.emitLine('')
idx = 0
for s, d in self.strlist:
defname = d.replace('_STRIDX','_HEAP_STRING')
genc.emitDefine(defname + '(heap)', 'DUK_HEAP_GET_STRING((heap),%s)' % d)
defname = d.replace('_STRIDX', '_HTHREAD_STRING')
genc.emitDefine(defname + '(thr)', 'DUK_HTHREAD_GET_STRING((thr),%s)' % d)
idx += 1
genc.emitLine('')
genc.emitDefine('DUK_HEAP_NUM_STRINGS', idx)
genc.emitLine('')
genc.emitDefine('DUK_STRIDX_START_RESERVED', self.idx_start_reserved)
genc.emitDefine('DUK_STRIDX_START_STRICT_RESERVED', self.idx_start_strict_reserved)
genc.emitDefine('DUK_STRIDX_END_RESERVED', len(self.strlist), comment='exclusive endpoint')
def getStringList(self):
strs = []
strs_base64 = []
for s, d in self.strlist:
# The 'strs' list has strings as-is, with U+0000 marking the
# internal prefix (not the runtime form; at runtime \xFF is used).
#
# The 'strs_base64' is byte exact to allow an application to
# use it for e.g. external strings optimization. The strings
# are encoded to UTF-8, internal prefix is replaced with \xFF,
# and the result is base-64 encoded to maintain byte exactness.
t = s.encode('utf-8')
if len(t) > 0 and t[0] == '\x00':
t = '\xff' + t[1:]
t = t.encode('base64')
if len(t) > 0 and t[-1] == '\n':
t = t[0:-1]
strs.append(s)
strs_base64.append(t)
return strs, strs_base64
| mit |
joelddiaz/openshift-tools | openshift/installer/vendored/openshift-ansible-3.7.42-1/roles/lib_utils/src/ansible/yedit.py | 25 | 2195 | # flake8: noqa
# pylint: skip-file
# pylint: disable=too-many-branches
def main():
''' ansible oc module for secrets '''
module = AnsibleModule(
argument_spec=dict(
state=dict(default='present', type='str',
choices=['present', 'absent', 'list']),
debug=dict(default=False, type='bool'),
src=dict(default=None, type='str'),
content=dict(default=None),
content_type=dict(default='dict', choices=['dict']),
key=dict(default='', type='str'),
value=dict(),
value_type=dict(default='', type='str'),
update=dict(default=False, type='bool'),
append=dict(default=False, type='bool'),
index=dict(default=None, type='int'),
curr_value=dict(default=None, type='str'),
curr_value_format=dict(default='yaml',
choices=['yaml', 'json', 'str'],
type='str'),
backup=dict(default=True, type='bool'),
separator=dict(default='.', type='str'),
edits=dict(default=None, type='list'),
),
mutually_exclusive=[["curr_value", "index"], ['update', "append"]],
required_one_of=[["content", "src"]],
)
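# A minimal playbook sketch for this module (illustrative only; the file
# path, key and value below are placeholders, not part of this module):
#
#   - yedit:
#       src: /etc/origin/master/master-config.yaml
#       key: admissionConfig.pluginConfig
#       value: "{{ plugin_config }}"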
# Verify we received either a valid key or edits with valid keys when receiving a src file.
# A valid key is one that is neither None nor ''.
if module.params['src'] is not None:
key_error = False
edit_error = False
if module.params['key'] in [None, '']:
key_error = True
if module.params['edits'] in [None, []]:
edit_error = True
else:
for edit in module.params['edits']:
if edit.get('key') in [None, '']:
edit_error = True
break
if key_error and edit_error:
module.fail_json(failed=True, msg='Empty value for parameter key not allowed.')
rval = Yedit.run_ansible(module.params)
if 'failed' in rval and rval['failed']:
module.fail_json(**rval)
module.exit_json(**rval)
if __name__ == '__main__':
main()
| apache-2.0 |
ericvandenbergfb/spark | examples/src/main/python/sql/hive.py | 50 | 3318 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
# $example on:spark_hive$
from os.path import expanduser, join, abspath
from pyspark.sql import SparkSession
from pyspark.sql import Row
# $example off:spark_hive$
"""
A simple example demonstrating Spark SQL Hive integration.
Run with:
./bin/spark-submit examples/src/main/python/sql/hive.py
"""
if __name__ == "__main__":
# $example on:spark_hive$
# warehouse_location points to the default location for managed databases and tables
warehouse_location = abspath('spark-warehouse')
spark = SparkSession \
.builder \
.appName("Python Spark SQL Hive integration example") \
.config("spark.sql.warehouse.dir", warehouse_location) \
.enableHiveSupport() \
.getOrCreate()
# spark is an existing SparkSession
spark.sql("CREATE TABLE IF NOT EXISTS src (key INT, value STRING) USING hive")
spark.sql("LOAD DATA LOCAL INPATH 'examples/src/main/resources/kv1.txt' INTO TABLE src")
# Queries are expressed in HiveQL
spark.sql("SELECT * FROM src").show()
# +---+-------+
# |key| value|
# +---+-------+
# |238|val_238|
# | 86| val_86|
# |311|val_311|
# ...
# Aggregation queries are also supported.
spark.sql("SELECT COUNT(*) FROM src").show()
# +--------+
# |count(1)|
# +--------+
# | 500 |
# +--------+
# The results of SQL queries are themselves DataFrames and support all normal functions.
sqlDF = spark.sql("SELECT key, value FROM src WHERE key < 10 ORDER BY key")
# The items in DataFrames are of type Row, which allows you to access each column by ordinal.
stringsDS = sqlDF.rdd.map(lambda row: "Key: %d, Value: %s" % (row.key, row.value))
for record in stringsDS.collect():
print(record)
# Key: 0, Value: val_0
# Key: 0, Value: val_0
# Key: 0, Value: val_0
# ...
# You can also use DataFrames to create temporary views within a SparkSession.
Record = Row("key", "value")
recordsDF = spark.createDataFrame([Record(i, "val_" + str(i)) for i in range(1, 101)])
recordsDF.createOrReplaceTempView("records")
# Queries can then join DataFrame data with data stored in Hive.
spark.sql("SELECT * FROM records r JOIN src s ON r.key = s.key").show()
# +---+------+---+------+
# |key| value|key| value|
# +---+------+---+------+
# | 2| val_2| 2| val_2|
# | 4| val_4| 4| val_4|
# | 5| val_5| 5| val_5|
# ...
# $example off:spark_hive$
spark.stop()
| apache-2.0 |
colloquium/spacewalk | client/tools/rhnpush/rhnpush.py | 1 | 27859 | #
# Copyright (c) 2008--2010 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
#
# $Id$
"""
Management tool for the RHN proxy.
This script performs various management operations on the RHN proxy:
- Creates the local directory structure needed to store local packages
- Uploads packages from a given directory to the RHN servers
- Optionally, once the packages are uploaded, they can be linked to (one or
more) channels, and copied in the local directories for these channels.
- Lists the RHN server's vision on a certain channel
- Checks if the local image of the channel (the local directory) is in sync
with the server's image, and prints the missing packages (or the extra
ones)
"""
import os
import random
import sys
import string
import time
import urlparse
import rhnpush_confmanager
try:
from optparse import Option, OptionParser
except ImportError:
from optik import Option, OptionParser
from rhn import rpclib
from spacewalk.common import rhn_mpm
from spacewalk.common.checksum import getFileChecksum
import uploadLib
import rhnpush_v2
# Global settings
BUFFER_SIZE = 65536
HEADERS_PER_CALL = 10
DEBUG = 0
RPMTAG_NOSOURCE = 1051
def main():
# Initialize a command-line processing object with a table of options
optionsTable = [
Option('-v','--verbose', action='count', help='Increase verbosity', default=0),
Option('-d','--dir', action='store', help='Process packages from this directory'),
Option('-c','--channel', action='append', help='Manage this channel (specified by label)'),
Option('-n','--count', action='store', help='Process this number of headers per call', type='int'),
Option('-l','--list', action='store_true', help='Only list the specified channels'),
Option('-r','--reldir', action='store', help='Relative dir to associate with the file'),
Option('-o','--orgid', action='store', help='Org ID', type='int'),
Option('-u','--username', action='store', help='Use this username to connect to RHN/Satellite'),
Option('-p','--password', action='store', help='Use this password to connect to RHN/Satellite'),
Option('-s','--stdin', action='store_true', help='Read the package names from stdin'),
Option('-X','--exclude', action='append', help='Exclude packages that match this glob expression'),
Option( '--force', action='store_true', help='Force the package upload (overwrites if already uploaded)'),
Option( '--nosig', action='store_true', help='Push unsigned packages'),
Option( '--newest', action='store_true', help='Only push the packages that are newer than the server ones'),
Option( '--nullorg', action='store_true', help='Use the null org id'),
Option( '--header', action='store_true', help='Upload only the header(s)'),
Option( '--source', action='store_true', help='Upload source package information'),
Option( '--server', action='store', help='Push to this server (http[s]://<hostname>/APP)'),
Option( '--proxy', action='store', help='Use proxy server (<server>:<port>)'),
Option( '--test', action='store_true', help='Only print the packages to be pushed'),
Option('-?','--usage', action='store_true', help='Briefly describe the options'),
Option('-N','--new-cache', action='store_true', help='Create a new username/password cache'),
Option( '--no-cache', action='store_true', help='Do not create a username/password cache'),
Option( '--extended-test', action='store_true', help='Perform a more verbose test'),
Option( '--no-session-caching', action='store_true',
help='Disables session-token support. Useful for using rhnpush with pre-4.0.6 satellites.'),
Option( '--tolerant', action='store_true',
help='If rhnpush errors while uploading a package, continue uploading the rest of the packages.')
]
#Having to maintain a store_true list is ugly. I'm trying to get rid of this.
#12/22/05 wregglej 173287 Added no_cache to true_list so its value gets changed from a string to an int.
true_list = ['usage', 'test', 'source', 'header', 'nullorg', 'newest',\
'nosig', 'force', 'list', 'stdin', 'new_cache','extended_test', 'no_cache',\
'no_session_caching', 'tolerant']
optionParser = OptionParser(option_list=optionsTable, usage="%prog [OPTION] [<package>]")
manager = rhnpush_confmanager.ConfManager(optionParser, true_list)
options = manager.get_config()
upload = UploadClass(options, files=options.files)
if options.usage:
optionParser.print_usage()
sys.exit(0)
if options.list:
if not options.channel:
upload.die(1, "Must specify a channel for --list to work")
upload.list()
return
if options.dir and not options.stdin:
upload.directory()
elif options.stdin and not options.dir:
upload.readStdin()
elif options.dir and options.stdin:
upload.readStdin()
upload.directory()
if options.exclude:
upload.filter_excludes()
if options.newest:
if not options.channel:
upload.die(1, "Must specify a channel for --newest to work")
upload.newest()
if not upload.files:
if upload.newest:
print "No new files to upload; exiting"
else:
print "Nothing to do (try --help for more options)"
sys.exit(0)
if options.test:
upload.test()
return
if options.extended_test:
upload.extended_test()
return
if options.header:
upload.uploadHeaders()
return
ret = upload.packages()
if ret != 0:
return 1
class UploadClass(uploadLib.UploadClass):
def setURL(self):
server = self.options.server
if server is None:
self.die(1, "Required parameter --server not supplied")
scheme, netloc, path, params, query, fragment = urlparse.urlparse(server)
if not netloc:
# No schema - trying to patch it up ourselves?
server = "http://" + server
scheme, netloc, path, params, query, fragment = urlparse.urlparse(server)
if not netloc:
self.die(2, "Invalid URL %s" % server)
if path == '':
path = '/APP'
if string.lower(scheme) not in ('http', 'https'):
self.die(3, "Unknown URL scheme %s" % scheme)
self.url = urlparse.urlunparse((scheme, netloc, path, params, query,
fragment))
self.url_v2 = urlparse.urlunparse((scheme, netloc, "/PACKAGE-PUSH",
params, query, fragment))
def setOrg(self):
if self.options.nullorg:
if self.options.force:
self.die(1, "ERROR: You cannot force a package to a nullorg channel.")
else:
# They push things to the None org id
self.orgId = ''
else:
self.orgId = self.options.orgid or -1
def setForce(self):
if self.options.force:
self.force = 4
else:
self.force = None
def setRelativeDir(self):
self.relativeDir = self.options.reldir
def setChannels(self):
self.channels = self.options.channel or []
def _test_force(self):
test_force_str = "Setting force flag: %s"
test_force = "Passed"
try:
self.setForce()
except:
test_force = "Failed"
print test_force_str % test_force
def _test_set_org(self):
test_set_org_str = "Setting the org: %s"
test_set_org = "Passed"
try:
self.setOrg()
except:
test_set_org = "Failed"
print test_set_org_str % test_set_org
def _test_set_url(self):
test_set_url_str = "Setting the URL: %s"
test_set_url = "Passed"
try:
self.setURL()
except:
test_set_url = "Failed"
print test_set_url_str % test_set_url
def _test_set_channels(self):
test_set_channels_str = "Setting the channels: %s"
test_set_channels = "Passed"
try:
self.setChannels()
except:
test_set_channels = "Failed"
print test_set_channels_str % test_set_channels
def _test_username_password(self):
test_user_pass_str = "Setting the username and password: %s"
test_user_pass = "Passed"
try:
self.setUsernamePassword()
except:
test_user_pass = "Failed"
print test_user_pass_str % test_user_pass
def _test_set_server(self):
test_set_server_str = "Setting the server: %s"
test_set_server = "Passed"
try:
self.setServer()
except:
test_set_server = "Failed"
print test_set_server_str % test_set_server
def _test_connect(self):
auth_ret = uploadLib.call(self.server.packages.test_login, self.username, self.password )
if auth_ret == 1:
test_auth = "Passed"
else:
test_auth = "Failed"
print "Testing connection and authentication: %s" % test_auth
def _test_access(self):
if self.new_sat_test():
access_ret = callable(self.server.packages.channelPackageSubscriptionBySession)
else:
access_ret = callable(self.server.packages.channelPackageSubscription)
if access_ret == 1:
test_access = "Passed"
else:
test_access = "Failed"
print "Testing access to upload functionality on server: %s" % test_access
#12/22/05 wregglej 173287 Added this function to test the new session authentication stuff.
#It still needs work.
def _test_authenticate(self):
self.authenticate()
def extended_test(self):
self._test_force()
self._test_set_org()
self._test_set_url()
self._test_set_channels()
self._test_username_password()
self._test_set_server()
self._test_connect()
self._test_access()
print "The files that would have been pushed:"
self.test()
def packages(self):
self.setForce()
# set the org
self.setOrg()
# set the URL
self.setURL()
# set the channels
self.setChannels()
# set the server
self.setServer()
#12/22/05 wregglej 173287 authenticate the session.
self.authenticate()
# Do we have the new-style handler available?
#ping the server for status
self.warn(2,"url is",self.url_v2)
ping = rhnpush_v2.PingPackageUpload(self.url_v2, self.options.proxy)
self.ping_status, errmsg, headerinfo = ping.ping()
self.warn(2, "Result codes:", self.ping_status, errmsg)
# move patch clusters to the end because all the patches in the cluster
# have to be pushed before the cluster itself
files1 = []
files2 = []
for file in self.files:
if file.startswith('patch-cluster-'):
files2.append(file)
else:
files1.append(file)
self.files = files1 + files2
channel_packages = []
# a little fault tolerance is in order
random.seed()
checkpkgflag = 0
tries = 3
#pkilambi:check if the Sat version we are talking to has this capability.
#If not use the normal way to talk to older satellites(< 4.1.0).
if headerinfo.getheader('X-RHN-Check-Package-Exists'):
checkpkgflag = 1
(server_digest_hash, pkgs_info, digest_hash) = self.check_package_exists()
for pkg in self.files:
ret = None #pkilambi:errors off as not initialized.this fixes it.
#temporary fix for picking pkgs instead of full paths
pkg_key = (pkg.strip()).split('/')[-1]
if checkpkgflag :
# it's a newer satellite; compute checksum checks on the client.
if not server_digest_hash.has_key(pkg_key):
continue
checksum_type, checksum = digest = digest_hash[pkg_key]
server_digest = tuple(server_digest_hash[pkg_key])
# compare checksums for existence check
if server_digest == digest and not self.options.force:
channel_packages.append(pkgs_info[pkg_key])
self.warn(1, "Package %s already exists on the RHN Server-- Skipping Upload...." % pkg)
continue
elif server_digest == ():
self.warn(1,"Package %s Not Found on RHN Server -- Uploading" % pkg)
elif server_digest == "on-disk" and not self.options.force:
channel_packages.append(pkgs_info[pkg_key])
self.warn(0, "Package %s on disk but not on db -- Skipping Upload" % pkg)
continue
elif server_digest != digest:
if self.options.force:
self.warn(1,"Package checksum %s mismatch -- Forcing Upload"% pkg)
else:
msg = """Error: Package %s already exists on the server with a different checksum. Skipping upload to prevent overwriting existing package. (You may use rhnpush with the --force option to force this upload if the force_upload option is enabled on your server.)\n"""% pkg
if not self.options.tolerant:
self.die(-1, msg)
self.warn(0, msg)
continue
else:
# it's an older satellite(< 4.1.0). Just do the push the usual old way,
# without checksum pre-check.
try:
f = open(pkg)
header, payload_stream = rhn_mpm.load(file=f)
checksum_type = header.checksum_type()
except rhn_mpm.InvalidPackageError, e:
if not self.options.tolerant:
self.die(-1, "ERROR: %s: This file doesn't appear to be a package" % pkg)
self.warn(2, "ERROR: %s: This file doesn't appear to be a package" % pkg)
continue
except IOError:
if not self.options.tolerant:
self.die(-1, "ERROR: %s: No such file or directory available" % pkg)
self.warn(2, "ERROR: %s: No such file or directory available" % pkg)
continue
checksum = getFileChecksum(checksum_type, file=payload_stream)
f.close()
for t in range(0, tries):
try:
ret = self.package(pkg, checksum_type, checksum)
if ret is None:
raise UploadError()
# TODO: Revisit this. We throw this error all over the place,
# but doing so will cause us to skip the --tolerant logic
# below. I don't think we really want this behavior.
# There are some cases where we don't want to retry 3
# times, but not at the expense of disabling the tolerant
# flag, IMHO. This loop needs some lovin'. -- pav
#FIX: it checks for tolerant flag and aborts only if the flag is
#not specified
except UploadError, ue:
if not self.options.tolerant:
self.die(1, ue)
self.warn(2, ue)
except AuthenticationRequired, a:
#session expired so we re-authenticate for the process to complete
#this uses the username and password from memory if available
#else it prompts for one.
self.authenticate()
except:
self.warn(2, sys.exc_info()[1])
wait = random.randint(1, 5)
self.warn(0, "Waiting %d seconds and trying again..." % wait)
time.sleep(wait)
#The else clause gets executed if the stuff in the try-except block *succeeds*.
else:
break
#if the preceding for-loop exits without a call to break, then this else clause gets called.
#What's kind of weird is that if the preceding for-loop doesn't call break then an error occurred
#and all of the retry attempts failed. If the for-loop *does* call break then everything is hunky-dory.
#In short, this else clause only gets called if something is F.U.B.A.R. and the retry attempts didn't
#fix anything.
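# A minimal for/else sketch of the control flow described above (illustrative
# only, not part of the upload logic; do_work and handle_total_failure are
# hypothetical names):
#
#   for attempt in range(tries):
#       if do_work():
#           break                     # success -> the else branch is skipped
#   else:
#       handle_total_failure()        # runs only if the loop never hit break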
else:
if not self.options.tolerant:
#pkilambi:bug#176358:this exits with a error code of 1
self.die(1, "Giving up after %d attempts" % tries)
else:
print "Giving up after %d attempts and continuing on..." % (tries,)
#5/13/05 wregglej - 154248 ?? we still want to add the packages if they're source.
if ret and self.channels: # and ret['arch'] != 'src':
# Don't bother to add the package if
# no channel was specified or a source rpm was passed
channel_packages.append(ret)
#self.channels is never None, it always has at least one entry with an empty string.
if len(self.channels) == 1 and self.channels[0] == '':
return
info = {
'packages' : channel_packages,
'channels' : self.channels
}
if self.orgId == '' or self.orgId > 0:
info['orgId'] = self.orgId
#2/3/06 wregglej 173287 Added check to see if we can use session tokens.
if channel_packages:
if self.new_sat_test():
#12/22/05 wregglej 173287 Changed the XMLRPC function to the new session-based one.
self.authenticate()
uploadLib.call(self.server.packages.channelPackageSubscriptionBySession,
self.session.getSessionString(), info)
else:
uploadLib.call(self.server.packages.channelPackageSubscription, self.username,
self.password, info)
return 0
# does an existence check of the packages to be uploaded and returns their checksum and other info
def check_package_exists(self):
self.warn(2, "Computing checksum and package info. This may take some time ...")
pkg_hash = {}
digest_hash = {}
for pkg in self.files:
pkg_info = {}
pkg_key = (pkg.strip()).split('/')[-1]
if not os.access(pkg, os.R_OK):
if not self.options.tolerant:
self.die(-1, "Could not read file %s" % pkg)
self.warn(-1, "Could not read file %s" % pkg)
continue
try:
f = open(pkg)
header, payload_stream = rhn_mpm.load(file=f)
checksum_type = header.checksum_type()
except rhn_mpm.InvalidPackageError, e:
if not self.options.tolerant:
self.die(-1, "ERROR: %s: This file doesn't appear to be a package" % pkg)
self.warn(2, "ERROR: %s: This file doesn't appear to be a package" % pkg)
continue
except IOError:
if not self.options.tolerant:
self.die(-1, "ERROR: %s: No such file or directory available" % pkg)
self.warn(2, "ERROR: %s: No such file or directory available" % pkg)
continue
checksum = getFileChecksum(checksum_type, file=payload_stream)
digest_hash[pkg_key] = (checksum_type, checksum)
f.close()
for tag in ('name', 'version', 'release', 'epoch', 'arch'):
val = header[tag]
if val is None:
val = ''
pkg_info[tag] = val
#b195903:the arch for srpms should be obtained by is_source check
#instead of checking arch in header
if header.is_source:
if not self.options.source:
self.die(-1, "ERROR: Trying to Push src rpm, Please re-try with --source.")
if RPMTAG_NOSOURCE in header.keys():
pkg_info['arch'] = 'nosrc'
else:
pkg_info['arch'] = 'src'
pkg_info['checksum_type'] = checksum_type
pkg_info['checksum'] = checksum
pkg_hash[pkg_key] = pkg_info
if self.options.nullorg:
#to satisfy xmlrpc from None values.
orgid = 'null'
else:
orgid = ''
info = {
'packages' : pkg_hash,
'channels' : self.channels,
'org_id' : orgid,
'force' : self.options.force or 0
}
# rpc call to get checksum info for all the packages to be uploaded
if not self.options.source:
if self.new_sat_test():
# computing the checksum and other info is an expensive process and the session
# could have expired. Make sure it's re-authenticated.
self.authenticate()
if uploadLib.exists_getPackageChecksumBySession(self.server):
checksum_data = uploadLib.getPackageChecksumBySession(self.server, self.session.getSessionString(), info)
else:
# old server only md5 capable
checksum_data = uploadLib.getPackageMD5sumBySession(self.server, self.session.getSessionString(), info)
else:
# even older server without session authentication
checksum_data = uploadLib.getPackageMD5sum(self.server, self.username, self.password, info)
else:
if self.new_sat_test():
# computing the checksum and other info is an expensive process and the session
# could have expired. Make sure it's re-authenticated.
self.authenticate()
if uploadLib.exists_getPackageChecksumBySession(self.server):
checksum_data = uploadLib.getSourcePackageChecksumBySession(self.server, self.session.getSessionString(), info)
else:
# old server only md5 capable
checksum_data = uploadLib.getSourcePackageMD5sumBySession(self.server, self.session.getSessionString(), info)
else:
# even older server without session authentication
checksum_data = uploadLib.getSourcePackageMD5sum(self.server, self.username, self.password, info)
return (checksum_data, pkg_hash, digest_hash)
def package(self, package, FileChecksumType, FileChecksum):
self.warn(1, "Uploading package %s" % package)
if not os.access(package, os.R_OK):
self.die(-1, "Could not read file %s" % package)
try:
h = uploadLib.get_header(package, source=self.options.source)
except uploadLib.InvalidPackageError, e:
# GS: MALFORMED PACKAGE
print "Unable to load package", package
return None
if hasattr(h, 'packaging'):
packaging = h.packaging
else:
packaging = 'rpm'
if packaging == 'rpm' and self.options.nosig is None and not h.is_signed():
#pkilambi:bug#173886:force exit to check for sig if --nosig
raise UploadError("ERROR: %s: unsigned rpm (use --nosig to force)"% package)
try:
ret = self._push_package_v2(package, FileChecksumType, FileChecksum)
except UploadError, e:
ret, diff_level, pdict = e.args[:3]
severities = {
1 : 'path changed',
2 : 'package resigned',
3 : 'differing build times or hosts',
4 : 'package recompiled',
}
if severities.has_key(diff_level):
strmsg = \
"Error: Package with same name already exists on " + \
"server but contents differ (" + \
severities[diff_level] + \
"). Use --force or remove old package before " + \
"uploading the newer version."
else:
strmsg = "Error: severity %s" % diff_level
self.warn(-1, "Uploading failed for %s\n%s\n\tDiff: %s" % \
(package, strmsg, pdict['diff']['diff']))
if diff_level != 1:
# This will prevent us from annoyingly retrying when there is
# no reason to.
raise UploadError()
return ret
return ret
def _push_package_v2(self, package, FileChecksumType, FileChecksum):
self.warn(1, "Using POST request")
pu = rhnpush_v2.PackageUpload(self.url_v2, self.options.proxy)
if self.new_sat_test():
pu.set_session(self.session.getSessionString())
else:
pu.set_auth(self.username, self.password)
pu.set_force(self.options.force)
pu.set_null_org(self.options.nullorg)
status, msgstr = pu.upload(package, FileChecksumType, FileChecksum)
ret = {}
for tag in ('name', 'version', 'release', 'epoch', 'arch'):
val = getattr(pu, "package_%s" % tag)
if val is None:
val = ''
ret[tag] = val
ret['checksum_type'] = FileChecksumType
ret['checksum'] = FileChecksum
if status == 400:
# Bad request - something bad happened
try:
data = rpclib.xmlrpclib.loads(msgstr)
except:
# Raise the exception instead of silently dying
raise UploadError("Error pushing %s: %s (%s)" %
(package, msgstr, status))
(diff_dict, ), methodname = data
del methodname
diff_level = diff_dict['level']
pdict = diff_dict['diff']
raise UploadError(ret, diff_level, pdict)
if status == 403:
#auth expired raise an exception to grab one
raise AuthenticationRequired()
if status != 200:
self.die(1, "Error pushing %s: %s (%s)" % (package, msgstr, status))
return ret
class UploadError(Exception):
pass
class AuthenticationRequired(Exception):
pass
if __name__ == '__main__':
# test code
sys.exit(main() or 0)
| gpl-2.0 |
avoinsystems/odoo | addons/project_issue_sheet/__openerp__.py | 260 | 1814 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Timesheet on Issues',
'version': '1.0',
'category': 'Project Management',
'description': """
This module adds the Timesheet support for the Issues/Bugs Management in Project.
=================================================================================
Worklogs can be maintained to signify number of hours spent by users to handle an issue.
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/project-management',
'depends': [
'project_issue',
'hr_timesheet_sheet',
],
'data': [
'project_issue_sheet_view.xml',
'security/ir.model.access.csv',
],
'demo': [],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
wolfram74/numerical_methods_iserles_notes | venv/lib/python2.7/site-packages/IPython/kernel/inprocess/client.py | 4 | 5681 | """A client for in-process kernels."""
#-----------------------------------------------------------------------------
# Copyright (C) 2012 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# IPython imports
from IPython.kernel.inprocess.socket import DummySocket
from IPython.utils.traitlets import Type, Instance
from IPython.kernel.clientabc import KernelClientABC
from IPython.kernel.client import KernelClient
# Local imports
from .channels import (
InProcessChannel,
InProcessHBChannel,
)
#-----------------------------------------------------------------------------
# Main kernel Client class
#-----------------------------------------------------------------------------
class InProcessKernelClient(KernelClient):
"""A client for an in-process kernel.
This class implements the interface of
`IPython.kernel.clientabc.KernelClientABC` and allows
(asynchronous) frontends to be used seamlessly with an in-process kernel.
See `IPython.kernel.client.KernelClient` for docstrings.
"""
# The classes to use for the various channels.
shell_channel_class = Type(InProcessChannel)
iopub_channel_class = Type(InProcessChannel)
stdin_channel_class = Type(InProcessChannel)
hb_channel_class = Type(InProcessHBChannel)
kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel')
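# A minimal usage sketch (illustrative; assumes an already created
# InProcessKernel instance bound to the name `kernel`):
#
#     client = InProcessKernelClient(kernel=kernel)
#     client.start_channels()
#     msg_id = client.execute("1 + 1")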
#--------------------------------------------------------------------------
# Channel management methods
#--------------------------------------------------------------------------
def start_channels(self, *args, **kwargs):
super(InProcessKernelClient, self).start_channels(self)
self.kernel.frontends.append(self)
@property
def shell_channel(self):
if self._shell_channel is None:
self._shell_channel = self.shell_channel_class(self)
return self._shell_channel
@property
def iopub_channel(self):
if self._iopub_channel is None:
self._iopub_channel = self.iopub_channel_class(self)
return self._iopub_channel
@property
def stdin_channel(self):
if self._stdin_channel is None:
self._stdin_channel = self.stdin_channel_class(self)
return self._stdin_channel
@property
def hb_channel(self):
if self._hb_channel is None:
self._hb_channel = self.hb_channel_class(self)
return self._hb_channel
# Methods for sending specific messages
# -------------------------------------
def execute(self, code, silent=False, store_history=True,
user_expressions={}, allow_stdin=None):
if allow_stdin is None:
allow_stdin = self.allow_stdin
content = dict(code=code, silent=silent, store_history=store_history,
user_expressions=user_expressions,
allow_stdin=allow_stdin)
msg = self.session.msg('execute_request', content)
self._dispatch_to_kernel(msg)
return msg['header']['msg_id']
def complete(self, code, cursor_pos=None):
if cursor_pos is None:
cursor_pos = len(code)
content = dict(code=code, cursor_pos=cursor_pos)
msg = self.session.msg('complete_request', content)
self._dispatch_to_kernel(msg)
return msg['header']['msg_id']
def inspect(self, code, cursor_pos=None, detail_level=0):
if cursor_pos is None:
cursor_pos = len(code)
content = dict(code=code, cursor_pos=cursor_pos,
detail_level=detail_level,
)
msg = self.session.msg('inspect_request', content)
self._dispatch_to_kernel(msg)
return msg['header']['msg_id']
def history(self, raw=True, output=False, hist_access_type='range', **kwds):
content = dict(raw=raw, output=output,
hist_access_type=hist_access_type, **kwds)
msg = self.session.msg('history_request', content)
self._dispatch_to_kernel(msg)
return msg['header']['msg_id']
def shutdown(self, restart=False):
# FIXME: What to do here?
raise NotImplementedError('Cannot shutdown in-process kernel')
def kernel_info(self):
"""Request kernel info."""
msg = self.session.msg('kernel_info_request')
self._dispatch_to_kernel(msg)
return msg['header']['msg_id']
def input(self, string):
if self.kernel is None:
raise RuntimeError('Cannot send input reply. No kernel exists.')
self.kernel.raw_input_str = string
def _dispatch_to_kernel(self, msg):
""" Send a message to the kernel and handle a reply.
"""
kernel = self.kernel
if kernel is None:
raise RuntimeError('Cannot send request. No kernel exists.')
stream = DummySocket()
self.session.send(stream, msg)
msg_parts = stream.recv_multipart()
kernel.dispatch_shell(stream, msg_parts)
idents, reply_msg = self.session.recv(stream, copy=False)
self.shell_channel.call_handlers_later(reply_msg)
#-----------------------------------------------------------------------------
# ABC Registration
#-----------------------------------------------------------------------------
KernelClientABC.register(InProcessKernelClient)
| mit |
837468220/python-for-android | python3-alpha/python3-src/Lib/encodings/__init__.py | 46 | 5146 | """ Standard "encodings" Package
Standard Python encoding modules are stored in this package
directory.
Codec modules must have names corresponding to normalized encoding
names as defined in the normalize_encoding() function below, e.g.
'utf-8' must be implemented by the module 'utf_8.py'.
Each codec module must export the following interface:
* getregentry() -> codecs.CodecInfo object
The getregentry() API must return a CodecInfo object with encoder, decoder,
incrementalencoder, incrementaldecoder, streamwriter and streamreader
attributes which adhere to the Python Codec Interface Standard.
In addition, a module may optionally also define the following
APIs which are then used by the package's codec search function:
* getaliases() -> sequence of encoding name strings to use as aliases
Alias names returned by getaliases() must be normalized encoding
names as defined by normalize_encoding().
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""#"
import codecs
from . import aliases
_cache = {}
_unknown = '--unknown--'
_import_tail = ['*']
_aliases = aliases.aliases
class CodecRegistryError(LookupError, SystemError):
pass
def normalize_encoding(encoding):
""" Normalize an encoding name.
Normalization works as follows: all non-alphanumeric
characters except the dot used for Python package names are
collapsed and replaced with a single underscore, e.g. ' -;#'
becomes '_'. Leading and trailing underscores are removed.
Note that encoding names should be ASCII only; if they do use
non-ASCII characters, these must be Latin-1 compatible.
"""
if isinstance(encoding, bytes):
encoding = str(encoding, "ascii")
chars = []
punct = False
for c in encoding:
if c.isalnum() or c == '.':
if punct and chars:
chars.append('_')
chars.append(c)
punct = False
else:
punct = True
return ''.join(chars)
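# Illustrative behaviour of normalize_encoding(), derived from the rules
# above (note that case is preserved; only punctuation runs collapse):
#   normalize_encoding('latin-1')   -> 'latin_1'
#   normalize_encoding('UTF-8')     -> 'UTF_8'
#   normalize_encoding(' -;#utf 8') -> 'utf_8'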
def search_function(encoding):
# Cache lookup
entry = _cache.get(encoding, _unknown)
if entry is not _unknown:
return entry
# Import the module:
#
# First try to find an alias for the normalized encoding
# name and lookup the module using the aliased name, then try to
# lookup the module using the standard import scheme, i.e. first
# try in the encodings package, then at top-level.
#
norm_encoding = normalize_encoding(encoding)
aliased_encoding = _aliases.get(norm_encoding) or \
_aliases.get(norm_encoding.replace('.', '_'))
if aliased_encoding is not None:
modnames = [aliased_encoding,
norm_encoding]
else:
modnames = [norm_encoding]
for modname in modnames:
if not modname or '.' in modname:
continue
try:
# Import is absolute to prevent the possibly malicious import of a
# module with side-effects that is not in the 'encodings' package.
mod = __import__('encodings.' + modname, fromlist=_import_tail,
level=0)
except ImportError:
pass
else:
break
else:
mod = None
try:
getregentry = mod.getregentry
except AttributeError:
# Not a codec module
mod = None
if mod is None:
# Cache misses
_cache[encoding] = None
return None
# Now ask the module for the registry entry
entry = getregentry()
if not isinstance(entry, codecs.CodecInfo):
if not 4 <= len(entry) <= 7:
raise CodecRegistryError('module "%s" (%s) failed to register'
% (mod.__name__, mod.__file__))
if not hasattr(entry[0], '__call__') or \
not hasattr(entry[1], '__call__') or \
(entry[2] is not None and not hasattr(entry[2], '__call__')) or \
(entry[3] is not None and not hasattr(entry[3], '__call__')) or \
(len(entry) > 4 and entry[4] is not None and not hasattr(entry[4], '__call__')) or \
(len(entry) > 5 and entry[5] is not None and not hasattr(entry[5], '__call__')):
raise CodecRegistryError('incompatible codecs in module "%s" (%s)'
% (mod.__name__, mod.__file__))
if len(entry)<7 or entry[6] is None:
entry += (None,)*(6-len(entry)) + (mod.__name__.split(".", 1)[1],)
entry = codecs.CodecInfo(*entry)
# Cache the codec registry entry
_cache[encoding] = entry
# Register its aliases (without overwriting previously registered
# aliases)
try:
codecaliases = mod.getaliases()
except AttributeError:
pass
else:
for alias in codecaliases:
if alias not in _aliases:
_aliases[alias] = modname
# Return the registry entry
return entry
# Register the search_function in the Python codec registry
codecs.register(search_function)
| apache-2.0 |
kenshay/ImageScripter | ProgramData/SystemFiles/Python/Lib/site-packages/comtypes-1.1.3-py2.7.egg/comtypes/test/test_BSTR.py | 3 | 1503 | import unittest, os
from ctypes import *
from comtypes import BSTR
from comtypes.test import requires
##requires("memleaks")
from comtypes.test.find_memleak import find_memleak
class Test(unittest.TestCase):
def check_leaks(self, func, limit=0):
bytes = find_memleak(func)
self.failIf(bytes > limit, "Leaks %d bytes" % bytes)
def test_creation(self):
def doit():
BSTR(u"abcdef" * 100)
# It seems this test is unreliable. Sometimes it leaks 4096
# bytes, sometimes not. Try to work around that...
self.check_leaks(doit, limit=4096)
def test_from_param(self):
def doit():
BSTR.from_param(u"abcdef")
self.check_leaks(doit)
def test_paramflags(self):
prototype = WINFUNCTYPE(c_void_p, BSTR)
func = prototype(("SysStringLen", oledll.oleaut32))
func.restype = c_void_p
func.argtypes = (BSTR, )
def doit():
func(u"abcdef")
func(u"abc xyz")
func(BSTR(u"abc def"))
self.check_leaks(doit)
def test_inargs(self):
SysStringLen = windll.oleaut32.SysStringLen
SysStringLen.argtypes = BSTR,
SysStringLen.restype = c_uint
self.failUnlessEqual(SysStringLen("abc xyz"), 7)
def doit():
SysStringLen("abc xyz")
SysStringLen(u"abc xyz")
SysStringLen(BSTR(u"abc def"))
self.check_leaks(doit)
if __name__ == "__main__":
unittest.main()
| gpl-3.0 |
wujuguang/sqlalchemy | lib/sqlalchemy/dialects/postgresql/pygresql.py | 1 | 8129 | # postgresql/pygresql.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: postgresql+pygresql
:name: pygresql
:dbapi: pgdb
:connectstring: postgresql+pygresql://user:password@host:port/dbname[?key=value&key=value...]
:url: http://www.pygresql.org/
.. note::
The pygresql dialect is **not tested as part of SQLAlchemy's continuous
integration** and may have unresolved issues. The recommended PostgreSQL
dialect is psycopg2.
""" # noqa
import decimal
import re
from .base import _DECIMAL_TYPES
from .base import _FLOAT_TYPES
from .base import _INT_TYPES
from .base import PGCompiler
from .base import PGDialect
from .base import PGIdentifierPreparer
from .base import UUID
from .hstore import HSTORE
from .json import JSON
from .json import JSONB
from ... import exc
from ... import processors
from ... import util
from ...sql.elements import Null
from ...types import JSON as Json
from ...types import Numeric
class _PGNumeric(Numeric):
def bind_processor(self, dialect):
return None
def result_processor(self, dialect, coltype):
if not isinstance(coltype, int):
coltype = coltype.oid
if self.asdecimal:
if coltype in _FLOAT_TYPES:
return processors.to_decimal_processor_factory(
decimal.Decimal, self._effective_decimal_return_scale
)
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
# PyGreSQL returns Decimal natively for 1700 (numeric)
return None
else:
raise exc.InvalidRequestError(
"Unknown PG numeric type: %d" % coltype
)
else:
if coltype in _FLOAT_TYPES:
# PyGreSQL returns float natively for 701 (float8)
return None
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
return processors.to_float
else:
raise exc.InvalidRequestError(
"Unknown PG numeric type: %d" % coltype
)
class _PGHStore(HSTORE):
def bind_processor(self, dialect):
if not dialect.has_native_hstore:
return super(_PGHStore, self).bind_processor(dialect)
hstore = dialect.dbapi.Hstore
def process(value):
if isinstance(value, dict):
return hstore(value)
return value
return process
def result_processor(self, dialect, coltype):
if not dialect.has_native_hstore:
return super(_PGHStore, self).result_processor(dialect, coltype)
class _PGJSON(JSON):
def bind_processor(self, dialect):
if not dialect.has_native_json:
return super(_PGJSON, self).bind_processor(dialect)
json = dialect.dbapi.Json
def process(value):
if value is self.NULL:
value = None
elif isinstance(value, Null) or (
value is None and self.none_as_null
):
return None
if value is None or isinstance(value, (dict, list)):
return json(value)
return value
return process
def result_processor(self, dialect, coltype):
if not dialect.has_native_json:
return super(_PGJSON, self).result_processor(dialect, coltype)
class _PGJSONB(JSONB):
def bind_processor(self, dialect):
if not dialect.has_native_json:
return super(_PGJSONB, self).bind_processor(dialect)
json = dialect.dbapi.Json
def process(value):
if value is self.NULL:
value = None
elif isinstance(value, Null) or (
value is None and self.none_as_null
):
return None
if value is None or isinstance(value, (dict, list)):
return json(value)
return value
return process
def result_processor(self, dialect, coltype):
if not dialect.has_native_json:
return super(_PGJSONB, self).result_processor(dialect, coltype)
class _PGUUID(UUID):
def bind_processor(self, dialect):
if not dialect.has_native_uuid:
return super(_PGUUID, self).bind_processor(dialect)
uuid = dialect.dbapi.Uuid
def process(value):
if value is None:
return None
if isinstance(value, (str, bytes)):
if len(value) == 16:
return uuid(bytes=value)
return uuid(value)
if isinstance(value, int):
return uuid(int=value)
return value
return process
def result_processor(self, dialect, coltype):
if not dialect.has_native_uuid:
return super(_PGUUID, self).result_processor(dialect, coltype)
if not self.as_uuid:
def process(value):
if value is not None:
return str(value)
return process
class _PGCompiler(PGCompiler):
def visit_mod_binary(self, binary, operator, **kw):
return (
self.process(binary.left, **kw)
+ " %% "
+ self.process(binary.right, **kw)
)
def post_process_text(self, text):
return text.replace("%", "%%")
class _PGIdentifierPreparer(PGIdentifierPreparer):
def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
return value.replace("%", "%%")
class PGDialect_pygresql(PGDialect):
driver = "pygresql"
statement_compiler = _PGCompiler
preparer = _PGIdentifierPreparer
@classmethod
def dbapi(cls):
import pgdb
return pgdb
colspecs = util.update_copy(
PGDialect.colspecs,
{
Numeric: _PGNumeric,
HSTORE: _PGHStore,
Json: _PGJSON,
JSON: _PGJSON,
JSONB: _PGJSONB,
UUID: _PGUUID,
},
)
def __init__(self, **kwargs):
super(PGDialect_pygresql, self).__init__(**kwargs)
try:
version = self.dbapi.version
m = re.match(r"(\d+)\.(\d+)", version)
version = (int(m.group(1)), int(m.group(2)))
except (AttributeError, ValueError, TypeError):
version = (0, 0)
self.dbapi_version = version
if version < (5, 0):
has_native_hstore = has_native_json = has_native_uuid = False
if version != (0, 0):
util.warn(
"PyGreSQL is only fully supported by SQLAlchemy"
" since version 5.0."
)
else:
self.supports_unicode_statements = True
self.supports_unicode_binds = True
has_native_hstore = has_native_json = has_native_uuid = True
self.has_native_hstore = has_native_hstore
self.has_native_json = has_native_json
self.has_native_uuid = has_native_uuid
def create_connect_args(self, url):
opts = url.translate_connect_args(username="user")
if "port" in opts:
opts["host"] = "%s:%s" % (
opts.get("host", "").rsplit(":", 1)[0],
opts.pop("port"),
)
opts.update(url.query)
return [], opts
def is_disconnect(self, e, connection, cursor):
if isinstance(e, self.dbapi.Error):
if not connection:
return False
try:
connection = connection.connection
except AttributeError:
pass
else:
if not connection:
return False
try:
return connection.closed
except AttributeError: # PyGreSQL < 5.0
return connection._cnx is None
return False
dialect = PGDialect_pygresql
| mit |
bugbound/webnuke | libs/angular/angularCustomJavascript.py | 1 | 1178 | class AngularCustomJavascript:
def __init__(self, jsinjector):
self.version = 0.1
self.jsinjector = jsinjector
self.jsinjector.add_help_topic('wn_showAngularAppName()', 'Show AngularJS Main Application Name')
self.jsinjector.add_js_file('libs/angular/js/app-name.js')
self.jsinjector.add_help_topic('wn_showAngularDeps()', 'Show AngularJS Main Dependencies')
self.jsinjector.add_js_file('libs/angular/js/angular-deps.js')
self.jsinjector.add_help_topic('wn_showAngularMainClasses()', 'Show AngularJS Main Classes')
self.jsinjector.add_help_topic('wn_showAngularAllClasses()', 'Show AngularJS All Classes')
self.jsinjector.add_js_file('libs/angular/js/angular-tools.js')
self.jsinjector.add_help_topic('wn_testNgResourceClasses()', 'Test ngResource Classes')
self.jsinjector.add_js_file('libs/angular/js/test-ngresource.js')
self.jsinjector.add_help_topic('wn_showAngularRoutes()', 'Show AngularJS URL Routes')
self.jsinjector.add_js_file('libs/angular/js/show-routes.js')
self.jsinjector.add_help_topic('wn_testHTTPClasses()', 'Test Angular Classes with get and query methods')
self.jsinjector.add_js_file('libs/angular/js/test-http.js')
| mit |
django-fluent/django-fluent-contents | fluent_contents/plugins/sharedcontent/templatetags/sharedcontent_tags.py | 2 | 5909 | from django.contrib.sites.models import Site
from django.core.cache import cache
from django.template import Library, TemplateSyntaxError
from django.utils.translation import get_language
from tag_parser.basetags import BaseAssignmentOrOutputNode
from fluent_contents import appsettings, rendering
from fluent_contents.plugins.sharedcontent.cache import (
get_shared_content_cache_key,
get_shared_content_cache_key_ptr,
)
from fluent_contents.plugins.sharedcontent.models import SharedContent
from fluent_contents.utils.templatetags import extract_literal, is_true
register = Library()
@register.tag("sharedcontent")
def sharedcontent(parser, token):
"""
Render a shared content block. Usage:
.. code-block:: django+html
{% sharedcontent "sidebar" %}
Optionally, a template can be used to render the content items:
.. code-block:: html+django
{% sharedcontent "sidebar" template="mysite/parts/slot_placeholder.html" %}
That template should loop over the content items, for example:
.. code-block:: html+django
{% for contentitem, html in contentitems %}
{% if not forloop.first %}<div class="splitter"></div>{% endif %}
{{ html }}
{% endfor %}
.. note::
When a template is used, the system assumes that the output can change per request.
Hence, the output of individual items will be cached, but the final merged output is no longer cached.
Add ``cachable=1`` to enable output caching for templates too.
"""
return SharedContentNode.parse(parser, token)
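# A minimal template usage sketch for the tag documented above (illustrative;
# the slot names and template path are placeholders):
#
#     {% load sharedcontent_tags %}
#     {% sharedcontent "sidebar" %}
#     {% sharedcontent "footer" template="parts/footer_slot.html" cachable=1 %}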
class SharedContentNode(BaseAssignmentOrOutputNode):
min_args = 1
max_args = 1
allowed_kwargs = ("template", "cachable")
@classmethod
def validate_args(cls, tag_name, *args, **kwargs):
if len(args) != 1:
raise TemplateSyntaxError(
"""{0} tag allows one arguments: 'slot name' and optionally: template="..".""".format(
tag_name
)
)
super(SharedContentNode, cls).validate_args(tag_name, *args)
def get_value(self, context, *tag_args, **tag_kwargs):
request = self.get_request(context)
output = None
# Process arguments
(slot,) = tag_args
template_name = tag_kwargs.get("template") or None
# cachable is default is True unless there is a template:
cachable = is_true(tag_kwargs.get("cachable", not bool(template_name)))
if template_name and cachable and not extract_literal(self.kwargs["template"]):
# If the template name originates from a variable, it can change any time.
# See PagePlaceholderNode.render_tag() why this is not allowed.
raise TemplateSyntaxError(
"{0} tag does not allow 'cachable' for variable template names!".format(
self.tag_name
)
)
# Caching will not happen when rendering via a template,
# because there is no way to tell whether that can be expired/invalidated.
try_cache = (
appsettings.FLUENT_CONTENTS_CACHE_OUTPUT
and appsettings.FLUENT_CONTENTS_CACHE_PLACEHOLDER_OUTPUT
and cachable
)
if isinstance(slot, SharedContent):
# Allow passing a sharedcontent, just like 'render_placeholder' does.
sharedcontent = slot
# See if there is cached output, avoid fetching the Placeholder via sharedcontents.contents.
if try_cache:
cache_key = get_shared_content_cache_key(sharedcontent)
output = cache.get(cache_key)
else:
site = Site.objects.get_current()
if try_cache:
# See if there is output cached, try to avoid fetching the SharedContent + Placeholder model.
# Have to perform 2 cache calls for this, because the placeholder output key is based on object IDs
cache_key_ptr = get_shared_content_cache_key_ptr(
int(site.pk), slot, language_code=get_language()
)
cache_key = cache.get(cache_key_ptr)
if cache_key is not None:
output = cache.get(cache_key)
if output is None:
# Get the placeholder
try:
sharedcontent = SharedContent.objects.parent_site(site).get(
slug=slot
)
except SharedContent.DoesNotExist:
return "<!-- shared content '{0}' does not yet exist -->".format(
slot
)
# Now that we've fetched the object, the object key be generated.
# No real need to check for output again, render_placeholder() does that already.
if try_cache and not cache_key:
cache.set(
cache_key_ptr, get_shared_content_cache_key(sharedcontent)
)
if output is None:
# Have to fetch + render it.
output = self.render_shared_content(
request, sharedcontent, template_name, cachable=cachable
)
# Need to track frontend media here, as the template tag can't return it.
rendering.register_frontend_media(request, output.media)
return output.html
def render_shared_content(
self, request, sharedcontent, template_name=None, cachable=None
):
# All parsing done, perform the actual rendering
placeholder = sharedcontent.contents # Another DB query
return rendering.render_placeholder(
request,
placeholder,
sharedcontent,
template_name=template_name,
cachable=cachable,
fallback_language=True,
)
| apache-2.0 |
gabrielrumiranda/coigosUteis | todosfib.py | 2 | 1616 | # Code that shows several implementations of the Fibonacci calculation and compares their speeds
# -*- coding: utf-8 -*-
# Trivial (naive recursive) version
def fib_trivial(n):
if n == 1 or n == 2:
return 1
return fib_trivial(n-1) + fib_trivial(n-2)
# Version with a manual cache (memoization)
cache = {}
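# cache maps n -> fib(n) for every value computed so far, so each value is computed only once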
def fib_cache(n):
if n == 1 or n == 2:
return 1
else:
if n not in cache:
cache[n] = fib_cache(n-1) + fib_cache(n-2)
return cache[n]
# Version using the lru_cache decorator from functools
from functools import lru_cache
@lru_cache(maxsize=None)
def fib_func(n):
if n == 1 or n == 2:
return 1
return fib_func(n-1) + fib_func(n-2)
# number that will be passed as the argument
numero = 35
print('Times for number %d ...' % numero)
# import the time module to measure how long each call takes
import time
# timing the trivial Fibonacci version
antes = time.time()  # time before the function call
fib_trivial(numero)  # call fib_trivial
depois = time.time()  # time after the call
diff = depois - antes  # elapsed time
print('Trivial Fibonacci time: %f seconds' % diff)  # show the result
# we do the same for the other functions
# Timing Fibonacci with the manual cache
antes = time.time()
fib_cache(numero)
depois = time.time()
diff = depois - antes
print('Cached Fibonacci time: %f seconds' % diff)
# Timing Fibonacci with lru_cache
antes = time.time()
fib_func(numero)
depois = time.time()
diff = depois - antes
print('Fibonacci time using lru_cache: %f seconds' % diff)
| gpl-3.0 |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.6/Lib/plat-irix5/torgb.py | 132 | 2869 | # Convert "arbitrary" image files to rgb files (SGI's image format).
# Input may be compressed.
# The uncompressed file type may be PBM, PGM, PPM, GIF, TIFF, or Sun raster.
# An exception is raised if the file is not of a recognized type.
# Returned filename is either the input filename or a temporary filename;
# in the latter case the caller must ensure that it is removed.
# Other temporary files used are removed by the function.
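# Illustrative usage (the file name is hypothetical):
#     rgbfile = torgb('picture.gif.Z')
#     ... use the SGI .rgb file ...
#     if rgbfile != 'picture.gif.Z':
#         os.unlink(rgbfile)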
from warnings import warnpy3k
warnpy3k("the torgb module has been removed in Python 3.0", stacklevel=2)
del warnpy3k
import os
import tempfile
import pipes
import imghdr
table = {}
t = pipes.Template()
t.append('fromppm $IN $OUT', 'ff')
table['ppm'] = t
t = pipes.Template()
t.append('(PATH=$PATH:/ufs/guido/bin/sgi; exec pnmtoppm)', '--')
t.append('fromppm $IN $OUT', 'ff')
table['pnm'] = t
table['pgm'] = t
table['pbm'] = t
t = pipes.Template()
t.append('fromgif $IN $OUT', 'ff')
table['gif'] = t
t = pipes.Template()
t.append('tifftopnm', '--')
t.append('(PATH=$PATH:/ufs/guido/bin/sgi; exec pnmtoppm)', '--')
t.append('fromppm $IN $OUT', 'ff')
table['tiff'] = t
t = pipes.Template()
t.append('rasttopnm', '--')
t.append('(PATH=$PATH:/ufs/guido/bin/sgi; exec pnmtoppm)', '--')
t.append('fromppm $IN $OUT', 'ff')
table['rast'] = t
t = pipes.Template()
t.append('djpeg', '--')
t.append('(PATH=$PATH:/ufs/guido/bin/sgi; exec pnmtoppm)', '--')
t.append('fromppm $IN $OUT', 'ff')
table['jpeg'] = t
uncompress = pipes.Template()
uncompress.append('uncompress', '--')
class error(Exception):
pass
def torgb(filename):
temps = []
ret = None
try:
ret = _torgb(filename, temps)
finally:
for temp in temps[:]:
if temp != ret:
try:
os.unlink(temp)
except os.error:
pass
temps.remove(temp)
return ret
def _torgb(filename, temps):
if filename[-2:] == '.Z':
(fd, fname) = tempfile.mkstemp()
os.close(fd)
temps.append(fname)
sts = uncompress.copy(filename, fname)
if sts:
raise error, filename + ': uncompress failed'
else:
fname = filename
try:
ftype = imghdr.what(fname)
except IOError, msg:
if type(msg) == type(()) and len(msg) == 2 and \
type(msg[0]) == type(0) and type(msg[1]) == type(''):
msg = msg[1]
if type(msg) is not type(''):
msg = repr(msg)
raise error, filename + ': ' + msg
if ftype == 'rgb':
return fname
if ftype is None or not table.has_key(ftype):
raise error, '%s: unsupported image file type %r' % (filename, ftype)
(fd, temp) = tempfile.mkstemp()
os.close(fd)
sts = table[ftype].copy(fname, temp)
if sts:
raise error, filename + ': conversion to rgb failed'
return temp
| mit |
kennethgillen/ansible | contrib/inventory/docker.py | 36 | 33532 | #!/usr/bin/env python
#
# (c) 2016 Paul Durivage <[email protected]>
# Chris Houseknecht <[email protected]>
# James Tanner <[email protected]>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
Docker Inventory Script
=======================
The inventory script generates dynamic inventory by making API requests to one or more Docker APIs. It's dynamic
because the inventory is generated at run-time rather than being read from a static file. The script generates the
inventory by connecting to one or many Docker APIs and inspecting the containers it finds at each API. Which APIs the
script contacts can be defined using environment variables or a configuration file.
Requirements
------------
Using the docker modules requires having docker-py <https://docker-py.readthedocs.org/en/stable/>
installed on the host running Ansible. To install docker-py:
pip install docker-py
Run for Specific Host
---------------------
When run for a specific container using the --host option this script returns the following hostvars:
{
"ansible_ssh_host": "",
"ansible_ssh_port": 0,
"docker_apparmorprofile": "",
"docker_args": [],
"docker_config": {
"AttachStderr": false,
"AttachStdin": false,
"AttachStdout": false,
"Cmd": [
"/hello"
],
"Domainname": "",
"Entrypoint": null,
"Env": null,
"Hostname": "9f2f80b0a702",
"Image": "hello-world",
"Labels": {},
"OnBuild": null,
"OpenStdin": false,
"StdinOnce": false,
"Tty": false,
"User": "",
"Volumes": null,
"WorkingDir": ""
},
"docker_created": "2016-04-18T02:05:59.659599249Z",
"docker_driver": "aufs",
"docker_execdriver": "native-0.2",
"docker_execids": null,
"docker_graphdriver": {
"Data": null,
"Name": "aufs"
},
"docker_hostconfig": {
"Binds": null,
"BlkioWeight": 0,
"CapAdd": null,
"CapDrop": null,
"CgroupParent": "",
"ConsoleSize": [
0,
0
],
"ContainerIDFile": "",
"CpuPeriod": 0,
"CpuQuota": 0,
"CpuShares": 0,
"CpusetCpus": "",
"CpusetMems": "",
"Devices": null,
"Dns": null,
"DnsOptions": null,
"DnsSearch": null,
"ExtraHosts": null,
"GroupAdd": null,
"IpcMode": "",
"KernelMemory": 0,
"Links": null,
"LogConfig": {
"Config": {},
"Type": "json-file"
},
"LxcConf": null,
"Memory": 0,
"MemoryReservation": 0,
"MemorySwap": 0,
"MemorySwappiness": null,
"NetworkMode": "default",
"OomKillDisable": false,
"PidMode": "host",
"PortBindings": null,
"Privileged": false,
"PublishAllPorts": false,
"ReadonlyRootfs": false,
"RestartPolicy": {
"MaximumRetryCount": 0,
"Name": ""
},
"SecurityOpt": [
"label:disable"
],
"UTSMode": "",
"Ulimits": null,
"VolumeDriver": "",
"VolumesFrom": null
},
"docker_hostnamepath": "/mnt/sda1/var/lib/docker/containers/9f2f80b0a702361d1ac432e6af816c19bda46da15c21264fb418c873de635a14/hostname",
"docker_hostspath": "/mnt/sda1/var/lib/docker/containers/9f2f80b0a702361d1ac432e6af816c19bda46da15c21264fb418c873de635a14/hosts",
"docker_id": "9f2f80b0a702361d1ac432e6af816c19bda46da15c21264fb418c873de635a14",
"docker_image": "0a6ba66e537a53a5ea94f7c6a99c534c6adb12e3ed09326d4bf3b38f7c3ba4e7",
"docker_logpath": "/mnt/sda1/var/lib/docker/containers/9f2f80b0a702361d1ac432e6af816c19bda46da15c21264fb418c873de635a14/9f2f80b0a702361d1ac432e6a-json.log",
"docker_mountlabel": "",
"docker_mounts": [],
"docker_name": "/hello-world",
"docker_networksettings": {
"Bridge": "",
"EndpointID": "",
"Gateway": "",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"HairpinMode": false,
"IPAddress": "",
"IPPrefixLen": 0,
"IPv6Gateway": "",
"LinkLocalIPv6Address": "",
"LinkLocalIPv6PrefixLen": 0,
"MacAddress": "",
"Networks": {
"bridge": {
"EndpointID": "",
"Gateway": "",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"IPAddress": "",
"IPPrefixLen": 0,
"IPv6Gateway": "",
"MacAddress": ""
}
},
"Ports": null,
"SandboxID": "",
"SandboxKey": "",
"SecondaryIPAddresses": null,
"SecondaryIPv6Addresses": null
},
"docker_path": "/hello",
"docker_processlabel": "",
"docker_resolvconfpath": "/mnt/sda1/var/lib/docker/containers/9f2f80b0a702361d1ac432e6af816c19bda46da15c21264fb418c873de635a14/resolv.conf",
"docker_restartcount": 0,
"docker_short_id": "9f2f80b0a7023",
"docker_state": {
"Dead": false,
"Error": "",
"ExitCode": 0,
"FinishedAt": "2016-04-18T02:06:00.296619369Z",
"OOMKilled": false,
"Paused": false,
"Pid": 0,
"Restarting": false,
"Running": false,
"StartedAt": "2016-04-18T02:06:00.272065041Z",
"Status": "exited"
}
}
Groups
------
When run in --list mode (the default), container instances are grouped by:
- container id
- container name
- container short id
- image_name (image_<image name>)
- docker_host
- running
- stopped
Configuration:
--------------
You can control the behavior of the inventory script by passing arguments, defining environment variables, or
creating a configuration file named docker.yml (sample provided in ansible/contrib/inventory). The order of precedence
is command line args, then the docker.yml file and finally environment variables.
Environment variables:
......................
To connect to a single Docker API the following variables can be defined in the environment to control the connection
options. These are the same environment variables used by the Docker modules.
DOCKER_HOST
The URL or Unix socket path used to connect to the Docker API. Defaults to unix://var/run/docker.sock.
DOCKER_API_VERSION:
The version of the Docker API running on the Docker Host. Defaults to the latest version of the API supported
by docker-py.
DOCKER_TIMEOUT:
    The maximum amount of time in seconds to wait on a response from the API. Defaults to 60 seconds.
DOCKER_TLS:
Secure the connection to the API by using TLS without verifying the authenticity of the Docker host server.
Defaults to False.
DOCKER_TLS_VERIFY:
Secure the connection to the API by using TLS and verifying the authenticity of the Docker host server.
    Defaults to False.
DOCKER_TLS_HOSTNAME:
When verifying the authenticity of the Docker Host server, provide the expected name of the server. Defaults
to localhost.
DOCKER_CERT_PATH:
Path to the directory containing the client certificate, client key and CA certificate.
DOCKER_SSL_VERSION:
Provide a valid SSL version number. Default value determined by docker-py, which at the time of this writing
was 1.0
In addition to the connection variables there are a couple variables used to control the execution and output of the
script:
DOCKER_CONFIG_FILE
Path to the configuration file. Defaults to ./docker.yml.
DOCKER_PRIVATE_SSH_PORT:
The private port (container port) on which SSH is listening for connections. Defaults to 22.
DOCKER_DEFAULT_IP:
The IP address to assign to ansible_host when the container's SSH port is mapped to interface '0.0.0.0'.
Configuration File
..................
Using a configuration file provides a means for defining a set of Docker APIs from which to build an inventory.
The default name of the file is derived from the name of the inventory script. By default the script will look for
the basename of the script (i.e. docker) with an extension of '.yml'.
You can also override the default name of the config file by defining DOCKER_CONFIG_FILE in the environment.
Here's what you can define in docker.yml:
defaults
Defines a default connection. Defaults will be taken from this and applied to any values not provided
for a host defined in the hosts list.
hosts
If you wish to get inventory from more than one Docker host, define a hosts list.
For the default host and each host in the hosts list define the following attributes:
host:
description: The URL or Unix socket path used to connect to the Docker API.
required: yes
tls:
description: Connect using TLS without verifying the authenticity of the Docker host server.
default: false
required: false
tls_verify:
        description: Connect using TLS and verify the authenticity of the Docker host server.
default: false
required: false
cert_path:
description: Path to the client's TLS certificate file.
default: null
required: false
cacert_path:
description: Use a CA certificate when performing server verification by providing the path to a CA certificate file.
default: null
required: false
key_path:
description: Path to the client's TLS key file.
default: null
required: false
version:
description: The Docker API version.
required: false
default: will be supplied by the docker-py module.
timeout:
description: The amount of time in seconds to wait on an API response.
required: false
default: 60
default_ip:
description: The IP address to assign to ansible_host when the container's SSH port is mapped to interface
'0.0.0.0'.
required: false
default: 127.0.0.1
private_ssh_port:
description: The port containers use for SSH
required: false
default: 22
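For illustration, a config file built from these attributes might look like the following
(the host URLs and certificate paths shown are hypothetical):
    defaults:
      host: unix://var/run/docker.sock
      private_ssh_port: 22
      default_ip: 127.0.0.1
    hosts:
      - host: tcp://10.45.5.16:4243
      - host: tcp://10.45.5.17:4243
        tls_verify: true
        cert_path: /etc/docker/client-cert.pem
        cacert_path: /etc/docker/ca.pem
        key_path: /etc/docker/client-key.pem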
Examples
--------
# Connect to the Docker API on localhost port 4243 and format the JSON output
DOCKER_HOST=tcp://localhost:4243 ./docker.py --pretty
# Any container's ssh port exposed on 0.0.0.0 will be mapped to
# another IP address (where Ansible will attempt to connect via SSH)
DOCKER_DEFAULT_IP=1.2.3.4 ./docker.py --pretty
# Run as input to a playbook:
ansible-playbook -i ~/projects/ansible/contrib/inventory/docker.py docker_inventory_test.yml
# Simple playbook to invoke with the above example:
- name: Test docker_inventory
hosts: all
connection: local
gather_facts: no
tasks:
- debug: msg="Container - {{ inventory_hostname }}"
'''
import os
import sys
import json
import argparse
import re
import yaml
from collections import defaultdict
# Manipulation of the path is needed because the docker-py
# module is imported by the name docker, and because this file
# is also named docker
for path in [os.getcwd(), '', os.path.dirname(os.path.abspath(__file__))]:
try:
del sys.path[sys.path.index(path)]
except:
pass
HAS_DOCKER_PY = True
HAS_DOCKER_ERROR = False
try:
from docker import Client
from docker.errors import APIError, TLSParameterError
from docker.tls import TLSConfig
from docker.constants import DEFAULT_TIMEOUT_SECONDS, DEFAULT_DOCKER_API_VERSION
except ImportError as exc:
HAS_DOCKER_ERROR = str(exc)
HAS_DOCKER_PY = False
DEFAULT_DOCKER_HOST = 'unix://var/run/docker.sock'
DEFAULT_TLS = False
DEFAULT_TLS_VERIFY = False
DEFAULT_IP = '127.0.0.1'
DEFAULT_SSH_PORT = '22'
BOOLEANS_TRUE = ['yes', 'on', '1', 'true', 1, True]
BOOLEANS_FALSE = ['no', 'off', '0', 'false', 0, False]
DOCKER_ENV_ARGS = dict(
config_file='DOCKER_CONFIG_FILE',
docker_host='DOCKER_HOST',
api_version='DOCKER_API_VERSION',
cert_path='DOCKER_CERT_PATH',
ssl_version='DOCKER_SSL_VERSION',
tls='DOCKER_TLS',
tls_verify='DOCKER_TLS_VERIFY',
timeout='DOCKER_TIMEOUT',
private_ssh_port='DOCKER_DEFAULT_SSH_PORT',
default_ip='DOCKER_DEFAULT_IP',
)
def fail(msg):
sys.stderr.write("%s\n" % msg)
sys.exit(1)
def log(msg, pretty_print=False):
if pretty_print:
print(json.dumps(msg, sort_keys=True, indent=2))
else:
print(msg + u'\n')
class AnsibleDockerClient(Client):
def __init__(self, auth_params, debug):
self.auth_params = auth_params
self.debug = debug
self._connect_params = self._get_connect_params()
try:
super(AnsibleDockerClient, self).__init__(**self._connect_params)
except APIError as exc:
self.fail("Docker API error: %s" % exc)
except Exception as exc:
self.fail("Error connecting: %s" % exc)
def fail(self, msg):
fail(msg)
def log(self, msg, pretty_print=False):
if self.debug:
log(msg, pretty_print)
def _get_tls_config(self, **kwargs):
self.log("get_tls_config:")
for key in kwargs:
self.log(" %s: %s" % (key, kwargs[key]))
try:
tls_config = TLSConfig(**kwargs)
return tls_config
except TLSParameterError as exc:
self.fail("TLS config error: %s" % exc)
def _get_connect_params(self):
auth = self.auth_params
self.log("auth params:")
for key in auth:
self.log(" %s: %s" % (key, auth[key]))
if auth['tls'] or auth['tls_verify']:
auth['docker_host'] = auth['docker_host'].replace('tcp://', 'https://')
if auth['tls'] and auth['cert_path'] and auth['key_path']:
# TLS with certs and no host verification
tls_config = self._get_tls_config(client_cert=(auth['cert_path'], auth['key_path']),
verify=False,
ssl_version=auth['ssl_version'])
return dict(base_url=auth['docker_host'],
tls=tls_config,
version=auth['api_version'],
timeout=auth['timeout'])
if auth['tls']:
            # TLS with no certs and no host verification
tls_config = self._get_tls_config(verify=False,
ssl_version=auth['ssl_version'])
return dict(base_url=auth['docker_host'],
tls=tls_config,
version=auth['api_version'],
timeout=auth['timeout'])
if auth['tls_verify'] and auth['cert_path'] and auth['key_path']:
# TLS with certs and host verification
if auth['cacert_path']:
tls_config = self._get_tls_config(client_cert=(auth['cert_path'], auth['key_path']),
ca_cert=auth['cacert_path'],
verify=True,
assert_hostname=auth['tls_hostname'],
ssl_version=auth['ssl_version'])
else:
tls_config = self._get_tls_config(client_cert=(auth['cert_path'], auth['key_path']),
verify=True,
assert_hostname=auth['tls_hostname'],
ssl_version=auth['ssl_version'])
return dict(base_url=auth['docker_host'],
tls=tls_config,
version=auth['api_version'],
timeout=auth['timeout'])
if auth['tls_verify'] and auth['cacert_path']:
# TLS with cacert only
tls_config = self._get_tls_config(ca_cert=auth['cacert_path'],
assert_hostname=auth['tls_hostname'],
verify=True,
ssl_version=auth['ssl_version'])
return dict(base_url=auth['docker_host'],
tls=tls_config,
version=auth['api_version'],
timeout=auth['timeout'])
if auth['tls_verify']:
# TLS with verify and no certs
tls_config = self._get_tls_config(verify=True,
assert_hostname=auth['tls_hostname'],
ssl_version=auth['ssl_version'])
return dict(base_url=auth['docker_host'],
tls=tls_config,
version=auth['api_version'],
timeout=auth['timeout'])
# No TLS
return dict(base_url=auth['docker_host'],
version=auth['api_version'],
timeout=auth['timeout'])
def _handle_ssl_error(self, error):
match = re.match(r"hostname.*doesn\'t match (\'.*\')", str(error))
if match:
msg = "You asked for verification that Docker host name matches %s. The actual hostname is %s. " \
"Most likely you need to set DOCKER_TLS_HOSTNAME or pass tls_hostname with a value of %s. " \
"You may also use TLS without verification by setting the tls parameter to true." \
% (self.auth_params['tls_hostname'], match.group(1))
self.fail(msg)
self.fail("SSL Exception: %s" % (error))
class EnvArgs(object):
def __init__(self):
self.config_file = None
self.docker_host = None
self.api_version = None
self.cert_path = None
self.ssl_version = None
self.tls = None
self.tls_verify = None
self.tls_hostname = None
self.timeout = None
self.default_ssh_port = None
self.default_ip = None
class DockerInventory(object):
def __init__(self):
self._args = self._parse_cli_args()
self._env_args = self._parse_env_args()
self.groups = defaultdict(list)
self.hostvars = defaultdict(dict)
def run(self):
config_from_file = self._parse_config_file()
if not config_from_file:
config_from_file = dict()
docker_hosts = self.get_hosts(config_from_file)
for host in docker_hosts:
client = AnsibleDockerClient(host, self._args.debug)
self.get_inventory(client, host)
if not self._args.host:
self.groups['docker_hosts'] = [host.get('docker_host') for host in docker_hosts]
self.groups['_meta'] = dict(
hostvars=self.hostvars
)
print(self._json_format_dict(self.groups, pretty_print=self._args.pretty))
else:
print(self._json_format_dict(self.hostvars.get(self._args.host, dict()), pretty_print=self._args.pretty))
sys.exit(0)
def get_inventory(self, client, host):
ssh_port = host.get('default_ssh_port')
default_ip = host.get('default_ip')
hostname = host.get('docker_host')
try:
containers = client.containers(all=True)
except Exception as exc:
self.fail("Error fetching containers for host %s - %s" % (hostname, str(exc)))
for container in containers:
id = container.get('Id')
short_id = id[:13]
try:
name = container.get('Names', list()).pop(0).lstrip('/')
except IndexError:
name = short_id
if not self._args.host or (self._args.host and self._args.host in [name, id, short_id]):
try:
inspect = client.inspect_container(id)
except Exception as exc:
self.fail("Error inspecting container %s - %s" % (name, str(exc)))
running = inspect.get('State', dict()).get('Running')
# Add container to groups
image_name = inspect.get('Config', dict()).get('Image')
if image_name:
self.groups["image_%s" % (image_name)].append(name)
self.groups[id].append(name)
self.groups[name].append(name)
if short_id not in self.groups:
self.groups[short_id].append(name)
self.groups[hostname].append(name)
if running is True:
self.groups['running'].append(name)
else:
self.groups['stopped'].append(name)
                # Figure out ssh IP and Port
try:
                    # Look up the public-facing port NAT'ed to the ssh port.
port = client.port(container, ssh_port)[0]
except (IndexError, AttributeError, TypeError):
port = dict()
try:
ip = default_ip if port['HostIp'] == '0.0.0.0' else port['HostIp']
except KeyError:
ip = ''
facts = dict(
ansible_ssh_host=ip,
ansible_ssh_port=port.get('HostPort', int()),
docker_name=name,
docker_short_id=short_id
)
for key in inspect:
fact_key = self._slugify(key)
facts[fact_key] = inspect.get(key)
self.hostvars[name].update(facts)
def _slugify(self, value):
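        # e.g. 'NetworkSettings' -> 'docker_networksettings', 'HostConfig' -> 'docker_hostconfig'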
        return 'docker_%s' % (re.sub(r'[^\w-]', '_', value).lower().lstrip('_'))
def get_hosts(self, config):
'''
Determine the list of docker hosts we need to talk to.
:param config: dictionary read from config file. can be empty.
:return: list of connection dictionaries
'''
hosts = list()
hosts_list = config.get('hosts')
defaults = config.get('defaults', dict())
self.log('defaults:')
self.log(defaults, pretty_print=True)
def_host = defaults.get('host')
def_tls = defaults.get('tls')
def_tls_verify = defaults.get('tls_verify')
def_tls_hostname = defaults.get('tls_hostname')
def_ssl_version = defaults.get('ssl_version')
def_cert_path = defaults.get('cert_path')
def_cacert_path = defaults.get('cacert_path')
def_key_path = defaults.get('key_path')
def_version = defaults.get('version')
def_timeout = defaults.get('timeout')
def_ip = defaults.get('default_ip')
def_ssh_port = defaults.get('private_ssh_port')
if hosts_list:
# use hosts from config file
for host in hosts_list:
docker_host = host.get('host') or def_host or self._args.docker_host or \
self._env_args.docker_host or DEFAULT_DOCKER_HOST
api_version = host.get('version') or def_version or self._args.api_version or \
self._env_args.api_version or DEFAULT_DOCKER_API_VERSION
tls_hostname = host.get('tls_hostname') or def_tls_hostname or self._args.tls_hostname or \
self._env_args.tls_hostname
tls_verify = host.get('tls_verify') or def_tls_verify or self._args.tls_verify or \
self._env_args.tls_verify or DEFAULT_TLS_VERIFY
tls = host.get('tls') or def_tls or self._args.tls or self._env_args.tls or DEFAULT_TLS
ssl_version = host.get('ssl_version') or def_ssl_version or self._args.ssl_version or \
self._env_args.ssl_version
cert_path = host.get('cert_path') or def_cert_path or self._args.cert_path or \
self._env_args.cert_path
if cert_path and cert_path == self._env_args.cert_path:
cert_path = os.path.join(cert_path, 'cert.pem')
cacert_path = host.get('cacert_path') or def_cacert_path or self._args.cacert_path or \
self._env_args.cert_path
if cacert_path and cacert_path == self._env_args.cert_path:
cacert_path = os.path.join(cacert_path, 'ca.pem')
key_path = host.get('key_path') or def_key_path or self._args.key_path or \
self._env_args.cert_path
if key_path and key_path == self._env_args.cert_path:
key_path = os.path.join(key_path, 'key.pem')
timeout = host.get('timeout') or def_timeout or self._args.timeout or self._env_args.timeout or \
DEFAULT_TIMEOUT_SECONDS
default_ip = host.get('default_ip') or def_ip or self._args.default_ip_address or \
DEFAULT_IP
default_ssh_port = host.get('private_ssh_port') or def_ssh_port or self._args.private_ssh_port or \
DEFAULT_SSH_PORT
host_dict = dict(
docker_host=docker_host,
api_version=api_version,
tls=tls,
tls_verify=tls_verify,
tls_hostname=tls_hostname,
cert_path=cert_path,
cacert_path=cacert_path,
key_path=key_path,
ssl_version=ssl_version,
timeout=timeout,
default_ip=default_ip,
default_ssh_port=default_ssh_port,
)
hosts.append(host_dict)
else:
# use default definition
docker_host = def_host or self._args.docker_host or self._env_args.docker_host or DEFAULT_DOCKER_HOST
api_version = def_version or self._args.api_version or self._env_args.api_version or \
DEFAULT_DOCKER_API_VERSION
tls_hostname = def_tls_hostname or self._args.tls_hostname or self._env_args.tls_hostname
tls_verify = def_tls_verify or self._args.tls_verify or self._env_args.tls_verify or DEFAULT_TLS_VERIFY
tls = def_tls or self._args.tls or self._env_args.tls or DEFAULT_TLS
ssl_version = def_ssl_version or self._args.ssl_version or self._env_args.ssl_version
cert_path = def_cert_path or self._args.cert_path or self._env_args.cert_path
if cert_path and cert_path == self._env_args.cert_path:
cert_path = os.path.join(cert_path, 'cert.pem')
cacert_path = def_cacert_path or self._args.cacert_path or self._env_args.cert_path
if cacert_path and cacert_path == self._env_args.cert_path:
cacert_path = os.path.join(cacert_path, 'ca.pem')
key_path = def_key_path or self._args.key_path or self._env_args.cert_path
if key_path and key_path == self._env_args.cert_path:
key_path = os.path.join(key_path, 'key.pem')
timeout = def_timeout or self._args.timeout or self._env_args.timeout or DEFAULT_TIMEOUT_SECONDS
default_ip = def_ip or self._args.default_ip_address or DEFAULT_IP
default_ssh_port = def_ssh_port or self._args.private_ssh_port or DEFAULT_SSH_PORT
host_dict = dict(
docker_host=docker_host,
api_version=api_version,
tls=tls,
tls_verify=tls_verify,
tls_hostname=tls_hostname,
cert_path=cert_path,
cacert_path=cacert_path,
key_path=key_path,
ssl_version=ssl_version,
timeout=timeout,
default_ip=default_ip,
default_ssh_port=default_ssh_port,
)
hosts.append(host_dict)
self.log("hosts: ")
self.log(hosts, pretty_print=True)
return hosts
def _parse_config_file(self):
config = dict()
config_path = None
if self._args.config_file:
config_path = self._args.config_file
elif self._env_args.config_file:
config_path = self._env_args.config_file
if config_path:
try:
config_file = os.path.abspath(config_path)
except:
config_file = None
if config_file and os.path.exists(config_file):
with open(config_file) as f:
try:
config = yaml.safe_load(f.read())
except Exception as exc:
self.fail("Error: parsing %s - %s" % (config_path, str(exc)))
return config
def log(self, msg, pretty_print=False):
if self._args.debug:
log(msg, pretty_print)
def fail(self, msg):
fail(msg)
def _parse_env_args(self):
args = EnvArgs()
for key, value in DOCKER_ENV_ARGS.items():
if os.environ.get(value):
val = os.environ.get(value)
if val in BOOLEANS_TRUE:
val = True
if val in BOOLEANS_FALSE:
val = False
setattr(args, key, val)
return args
def _parse_cli_args(self):
# Parse command line arguments
basename = os.path.splitext(os.path.basename(__file__))[0]
default_config = basename + '.yml'
parser = argparse.ArgumentParser(
description='Return Ansible inventory for one or more Docker hosts.')
parser.add_argument('--list', action='store_true', default=True,
help='List all containers (default: True)')
parser.add_argument('--debug', action='store_true', default=False,
help='Send debug messages to STDOUT')
parser.add_argument('--host', action='store',
help='Only get information for a specific container.')
parser.add_argument('--pretty', action='store_true', default=False,
help='Pretty print JSON output(default: False)')
parser.add_argument('--config-file', action='store', default=default_config,
help="Name of the config file to use. Default is %s" % (default_config))
parser.add_argument('--docker-host', action='store', default=None,
help="The base url or Unix sock path to connect to the docker daemon. Defaults to %s"
% (DEFAULT_DOCKER_HOST))
parser.add_argument('--tls-hostname', action='store', default='localhost',
help="Host name to expect in TLS certs. Defaults to 'localhost'")
parser.add_argument('--api-version', action='store', default=None,
help="Docker daemon API version. Defaults to %s" % (DEFAULT_DOCKER_API_VERSION))
parser.add_argument('--timeout', action='store', default=None,
help="Docker connection timeout in seconds. Defaults to %s"
% (DEFAULT_TIMEOUT_SECONDS))
parser.add_argument('--cacert-path', action='store', default=None,
help="Path to the TLS certificate authority pem file.")
parser.add_argument('--cert-path', action='store', default=None,
help="Path to the TLS certificate pem file.")
parser.add_argument('--key-path', action='store', default=None,
help="Path to the TLS encryption key pem file.")
parser.add_argument('--ssl-version', action='store', default=None,
help="TLS version number")
parser.add_argument('--tls', action='store_true', default=None,
help="Use TLS. Defaults to %s" % (DEFAULT_TLS))
parser.add_argument('--tls-verify', action='store_true', default=None,
help="Verify TLS certificates. Defaults to %s" % (DEFAULT_TLS_VERIFY))
parser.add_argument('--private-ssh-port', action='store', default=None,
help="Default private container SSH Port. Defaults to %s" % (DEFAULT_SSH_PORT))
parser.add_argument('--default-ip-address', action='store', default=None,
help="Default container SSH IP address. Defaults to %s" % (DEFAULT_IP))
return parser.parse_args()
def _json_format_dict(self, data, pretty_print=False):
# format inventory data for output
if pretty_print:
return json.dumps(data, sort_keys=True, indent=4)
else:
return json.dumps(data)
def main():
if not HAS_DOCKER_PY:
fail("Failed to import docker-py. Try `pip install docker-py` - %s" % (HAS_DOCKER_ERROR))
DockerInventory().run()
main()
| gpl-3.0 |
bebbi/closure-library | closure/bin/labs/code/generate_jsdoc_test.py | 212 | 3494 | #!/usr/bin/env python
#
# Copyright 2013 The Closure Library Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit test for generate_jsdoc."""
__author__ = '[email protected] (Nathan Naze)'
import re
import unittest
import generate_jsdoc
class InsertJsDocTestCase(unittest.TestCase):
"""Unit test for source. Tests the parser on a known source input."""
def testMatchFirstFunction(self):
match = generate_jsdoc._MatchFirstFunction(_TEST_SOURCE)
self.assertNotEqual(None, match)
self.assertEqual('aaa, bbb, ccc', match.group('arguments'))
match = generate_jsdoc._MatchFirstFunction(_INDENTED_SOURCE)
self.assertNotEqual(None, match)
self.assertEqual('', match.group('arguments'))
match = generate_jsdoc._MatchFirstFunction(_ODD_NEWLINES_SOURCE)
self.assertEquals('goog.\nfoo.\nbar\n.baz.\nqux',
match.group('identifier'))
def testParseArgString(self):
self.assertEquals(
['foo', 'bar', 'baz'],
list(generate_jsdoc._ParseArgString('foo, bar, baz')))
def testExtractFunctionBody(self):
self.assertEquals(
'\n // Function comments.\n return;\n',
generate_jsdoc._ExtractFunctionBody(_TEST_SOURCE))
self.assertEquals(
'\n var bar = 3;\n return true;\n',
generate_jsdoc._ExtractFunctionBody(_INDENTED_SOURCE, 2))
def testContainsValueReturn(self):
self.assertTrue(generate_jsdoc._ContainsReturnValue(_INDENTED_SOURCE))
self.assertFalse(generate_jsdoc._ContainsReturnValue(_TEST_SOURCE))
def testInsertString(self):
self.assertEquals(
'abc123def',
generate_jsdoc._InsertString('abcdef', '123', 3))
def testInsertJsDoc(self):
self.assertEquals(
_EXPECTED_INDENTED_SOURCE,
generate_jsdoc.InsertJsDoc(_INDENTED_SOURCE))
self.assertEquals(
_EXPECTED_TEST_SOURCE,
generate_jsdoc.InsertJsDoc(_TEST_SOURCE))
self.assertEquals(
_EXPECTED_ODD_NEWLINES_SOURCE,
generate_jsdoc.InsertJsDoc(_ODD_NEWLINES_SOURCE))
_INDENTED_SOURCE = """\
boo.foo.woo = function() {
var bar = 3;
return true;
};
"""
_EXPECTED_INDENTED_SOURCE = """\
/**
* @return
*/
boo.foo.woo = function() {
var bar = 3;
return true;
};
"""
_TEST_SOURCE = """\
// Random comment.
goog.foo.bar = function (aaa, bbb, ccc) {
// Function comments.
return;
};
"""
_EXPECTED_TEST_SOURCE = """\
// Random comment.
/**
* @param {} aaa
* @param {} bbb
* @param {} ccc
*/
goog.foo.bar = function (aaa, bbb, ccc) {
// Function comments.
return;
};
"""
_ODD_NEWLINES_SOURCE = """\
goog.
foo.
bar
.baz.
qux
=
function
(aaa,
bbb, ccc) {
// Function comments.
return;
};
"""
_EXPECTED_ODD_NEWLINES_SOURCE = """\
/**
* @param {} aaa
* @param {} bbb
* @param {} ccc
*/
goog.
foo.
bar
.baz.
qux
=
function
(aaa,
bbb, ccc) {
// Function comments.
return;
};
"""
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
jayceyxc/hue | desktop/core/ext-py/boto-2.42.0/tests/integration/cloudsearch/test_layers.py | 130 | 2742 | # Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Tests for Layer1 of Cloudsearch
"""
import time
from tests.unit import unittest
from boto.cloudsearch.layer1 import Layer1
from boto.cloudsearch.layer2 import Layer2
from boto.regioninfo import RegionInfo
class CloudSearchLayer1Test(unittest.TestCase):
cloudsearch = True
def setUp(self):
super(CloudSearchLayer1Test, self).setUp()
self.layer1 = Layer1()
self.domain_name = 'test-%d' % int(time.time())
def test_create_domain(self):
resp = self.layer1.create_domain(self.domain_name)
self.addCleanup(self.layer1.delete_domain, self.domain_name)
self.assertTrue(resp.get('created', False))
class CloudSearchLayer2Test(unittest.TestCase):
cloudsearch = True
def setUp(self):
super(CloudSearchLayer2Test, self).setUp()
self.layer2 = Layer2()
self.domain_name = 'test-%d' % int(time.time())
def test_create_domain(self):
domain = self.layer2.create_domain(self.domain_name)
self.addCleanup(domain.delete)
        self.assertTrue(domain.created)
self.assertEqual(domain.domain_name, self.domain_name)
self.assertEqual(domain.num_searchable_docs, 0)
def test_initialization_regression(self):
us_west_2 = RegionInfo(
name='us-west-2',
endpoint='cloudsearch.us-west-2.amazonaws.com'
)
self.layer2 = Layer2(
region=us_west_2,
host='cloudsearch.us-west-2.amazonaws.com'
)
self.assertEqual(
self.layer2.layer1.host,
'cloudsearch.us-west-2.amazonaws.com'
)
| apache-2.0 |
chengduoZH/Paddle | python/paddle/fluid/tests/unittests/test_expand_op.py | 2 | 6481 | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
import paddle.fluid as fluid
# Situation 1: expand_times is a list (without tensor)
class TestExpandOpRank1(OpTest):
def setUp(self):
self.op_type = "expand"
self.init_data()
self.inputs = {'X': np.random.random(self.ori_shape).astype("float32")}
self.attrs = {'expand_times': self.expand_times}
output = np.tile(self.inputs['X'], self.expand_times)
self.outputs = {'Out': output}
def init_data(self):
self.ori_shape = [12]
self.expand_times = [2]
def test_check_output(self):
self.check_output()
def test_check_grad(self):
self.check_grad(['X'], 'Out')
class TestExpandOpRank2_Corner(TestExpandOpRank1):
def init_data(self):
self.ori_shape = [12]
self.expand_times = [2]
class TestExpandOpRank2(TestExpandOpRank1):
def init_data(self):
self.ori_shape = [12, 14]
self.expand_times = [2, 3]
class TestExpandOpRank3_Corner(TestExpandOpRank1):
def init_data(self):
self.ori_shape = (2, 4, 5)
self.expand_times = (1, 1, 1)
class TestExpandOpRank3(TestExpandOpRank1):
def init_data(self):
self.ori_shape = (2, 4, 5)
self.expand_times = (2, 1, 4)
class TestExpandOpRank4(TestExpandOpRank1):
def init_data(self):
self.ori_shape = (2, 4, 5, 7)
self.expand_times = (3, 2, 1, 2)
# Situation 2: expand_times is a list (with tensor)
class TestExpandOpRank1_tensor_attr(OpTest):
def setUp(self):
self.op_type = "expand"
self.init_data()
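        # Each expand time is wrapped in its own 1-element int32 tensor and passed
        # to the op through the 'expand_times_tensor' input built below.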
expand_times_tensor = []
for index, ele in enumerate(self.expand_times):
expand_times_tensor.append(("x" + str(index), np.ones(
(1)).astype('int32') * ele))
self.inputs = {
'X': np.random.random(self.ori_shape).astype("float32"),
'expand_times_tensor': expand_times_tensor,
}
self.attrs = {"expand_times": self.infer_expand_times}
output = np.tile(self.inputs['X'], self.expand_times)
self.outputs = {'Out': output}
def init_data(self):
self.ori_shape = [12]
self.expand_times = [2]
self.infer_expand_times = [-1]
def test_check_output(self):
self.check_output()
def test_check_grad(self):
self.check_grad(['X'], 'Out')
class TestExpandOpRank2_Corner_tensor_attr(TestExpandOpRank1_tensor_attr):
def init_data(self):
self.ori_shape = [12, 14]
self.expand_times = [1, 1]
self.infer_expand_times = [1, -1]
class TestExpandOpRank2_attr_tensor(TestExpandOpRank1_tensor_attr):
def init_data(self):
self.ori_shape = [12, 14]
self.expand_times = [2, 3]
self.infer_expand_times = [-1, 3]
# Situation 3: expand_times is a tensor
class TestExpandOpRank1_tensor(OpTest):
def setUp(self):
self.op_type = "expand"
self.init_data()
self.inputs = {
'X': np.random.random(self.ori_shape).astype("float32"),
'ExpandTimes': np.array(self.expand_times).astype("int32"),
}
self.attrs = {}
output = np.tile(self.inputs['X'], self.expand_times)
self.outputs = {'Out': output}
def init_data(self):
self.ori_shape = [12]
self.expand_times = [2]
def test_check_output(self):
self.check_output()
def test_check_grad(self):
self.check_grad(['X'], 'Out')
class TestExpandOpRank2_tensor(TestExpandOpRank1_tensor):
def init_data(self):
self.ori_shape = [12, 14]
self.expand_times = [2, 3]
# Situation 4: input x is Integer
class TestExpandOpInteger(OpTest):
def setUp(self):
self.op_type = "expand"
self.inputs = {
'X': np.random.randint(
10, size=(2, 4, 5)).astype("int32")
}
self.attrs = {'expand_times': [2, 1, 4]}
output = np.tile(self.inputs['X'], (2, 1, 4))
self.outputs = {'Out': output}
def test_check_output(self):
self.check_output()
# Situation 5: input x is Bool
class TestExpandOpBoolean(OpTest):
def setUp(self):
self.op_type = "expand"
self.inputs = {'X': np.random.randint(2, size=(2, 4, 5)).astype("bool")}
self.attrs = {'expand_times': [2, 1, 4]}
output = np.tile(self.inputs['X'], (2, 1, 4))
self.outputs = {'Out': output}
def test_check_output(self):
self.check_output()
# Test python API
class TestExpandAPI(OpTest):
def test_api(self):
input = np.random.random([12, 14]).astype("float32")
x = fluid.layers.data(
name='x', shape=[12, 14], append_batch_size=False, dtype="float32")
positive_2 = fluid.layers.fill_constant([1], "int32", 2)
expand_times = fluid.layers.data(
name="expand_times", shape=[2], append_batch_size=False)
out_1 = fluid.layers.expand(x, expand_times=[2, 3])
out_2 = fluid.layers.expand(x, expand_times=[positive_2, 3])
out_3 = fluid.layers.expand(x, expand_times=expand_times)
exe = fluid.Executor(place=fluid.CPUPlace())
res_1, res_2, res_3 = exe.run(fluid.default_main_program(),
feed={
"x": input,
"expand_times":
np.array([1, 3]).astype("int32")
},
fetch_list=[out_1, out_2, out_3])
assert np.array_equal(res_1, np.tile(input, (2, 3)))
assert np.array_equal(res_2, np.tile(input, (2, 3)))
assert np.array_equal(res_3, np.tile(input, (1, 3)))
if __name__ == "__main__":
unittest.main()
| apache-2.0 |
faridani/pyDoc | Unidecode/build/lib/unidecode/x064.py | 252 | 4655 | data = (
'Chan ', # 0x00
'Ge ', # 0x01
'Lou ', # 0x02
'Zong ', # 0x03
'Geng ', # 0x04
'Jiao ', # 0x05
'Gou ', # 0x06
'Qin ', # 0x07
'Yong ', # 0x08
'Que ', # 0x09
'Chou ', # 0x0a
'Chi ', # 0x0b
'Zhan ', # 0x0c
'Sun ', # 0x0d
'Sun ', # 0x0e
'Bo ', # 0x0f
'Chu ', # 0x10
'Rong ', # 0x11
'Beng ', # 0x12
'Cuo ', # 0x13
'Sao ', # 0x14
'Ke ', # 0x15
'Yao ', # 0x16
'Dao ', # 0x17
'Zhi ', # 0x18
'Nu ', # 0x19
'Xie ', # 0x1a
'Jian ', # 0x1b
'Sou ', # 0x1c
'Qiu ', # 0x1d
'Gao ', # 0x1e
'Xian ', # 0x1f
'Shuo ', # 0x20
'Sang ', # 0x21
'Jin ', # 0x22
'Mie ', # 0x23
'E ', # 0x24
'Chui ', # 0x25
'Nuo ', # 0x26
'Shan ', # 0x27
'Ta ', # 0x28
'Jie ', # 0x29
'Tang ', # 0x2a
'Pan ', # 0x2b
'Ban ', # 0x2c
'Da ', # 0x2d
'Li ', # 0x2e
'Tao ', # 0x2f
'Hu ', # 0x30
'Zhi ', # 0x31
'Wa ', # 0x32
'Xia ', # 0x33
'Qian ', # 0x34
'Wen ', # 0x35
'Qiang ', # 0x36
'Tian ', # 0x37
'Zhen ', # 0x38
'E ', # 0x39
'Xi ', # 0x3a
'Nuo ', # 0x3b
'Quan ', # 0x3c
'Cha ', # 0x3d
'Zha ', # 0x3e
'Ge ', # 0x3f
'Wu ', # 0x40
'En ', # 0x41
'She ', # 0x42
'Kang ', # 0x43
'She ', # 0x44
'Shu ', # 0x45
'Bai ', # 0x46
'Yao ', # 0x47
'Bin ', # 0x48
'Sou ', # 0x49
'Tan ', # 0x4a
'Sa ', # 0x4b
'Chan ', # 0x4c
'Suo ', # 0x4d
'Liao ', # 0x4e
'Chong ', # 0x4f
'Chuang ', # 0x50
'Guo ', # 0x51
'Bing ', # 0x52
'Feng ', # 0x53
'Shuai ', # 0x54
'Di ', # 0x55
'Qi ', # 0x56
'Sou ', # 0x57
'Zhai ', # 0x58
'Lian ', # 0x59
'Tang ', # 0x5a
'Chi ', # 0x5b
'Guan ', # 0x5c
'Lu ', # 0x5d
'Luo ', # 0x5e
'Lou ', # 0x5f
'Zong ', # 0x60
'Gai ', # 0x61
'Hu ', # 0x62
'Zha ', # 0x63
'Chuang ', # 0x64
'Tang ', # 0x65
'Hua ', # 0x66
'Cui ', # 0x67
'Nai ', # 0x68
'Mo ', # 0x69
'Jiang ', # 0x6a
'Gui ', # 0x6b
'Ying ', # 0x6c
'Zhi ', # 0x6d
'Ao ', # 0x6e
'Zhi ', # 0x6f
'Nie ', # 0x70
'Man ', # 0x71
'Shan ', # 0x72
'Kou ', # 0x73
'Shu ', # 0x74
'Suo ', # 0x75
'Tuan ', # 0x76
'Jiao ', # 0x77
'Mo ', # 0x78
'Mo ', # 0x79
'Zhe ', # 0x7a
'Xian ', # 0x7b
'Keng ', # 0x7c
'Piao ', # 0x7d
'Jiang ', # 0x7e
'Yin ', # 0x7f
'Gou ', # 0x80
'Qian ', # 0x81
'Lue ', # 0x82
'Ji ', # 0x83
'Ying ', # 0x84
'Jue ', # 0x85
'Pie ', # 0x86
'Pie ', # 0x87
'Lao ', # 0x88
'Dun ', # 0x89
'Xian ', # 0x8a
'Ruan ', # 0x8b
'Kui ', # 0x8c
'Zan ', # 0x8d
'Yi ', # 0x8e
'Xun ', # 0x8f
'Cheng ', # 0x90
'Cheng ', # 0x91
'Sa ', # 0x92
'Nao ', # 0x93
'Heng ', # 0x94
'Si ', # 0x95
'Qian ', # 0x96
'Huang ', # 0x97
'Da ', # 0x98
'Zun ', # 0x99
'Nian ', # 0x9a
'Lin ', # 0x9b
'Zheng ', # 0x9c
'Hui ', # 0x9d
'Zhuang ', # 0x9e
'Jiao ', # 0x9f
'Ji ', # 0xa0
'Cao ', # 0xa1
'Dan ', # 0xa2
'Dan ', # 0xa3
'Che ', # 0xa4
'Bo ', # 0xa5
'Che ', # 0xa6
'Jue ', # 0xa7
'Xiao ', # 0xa8
'Liao ', # 0xa9
'Ben ', # 0xaa
'Fu ', # 0xab
'Qiao ', # 0xac
'Bo ', # 0xad
'Cuo ', # 0xae
'Zhuo ', # 0xaf
'Zhuan ', # 0xb0
'Tuo ', # 0xb1
'Pu ', # 0xb2
'Qin ', # 0xb3
'Dun ', # 0xb4
'Nian ', # 0xb5
'[?] ', # 0xb6
'Xie ', # 0xb7
'Lu ', # 0xb8
'Jiao ', # 0xb9
'Cuan ', # 0xba
'Ta ', # 0xbb
'Han ', # 0xbc
'Qiao ', # 0xbd
'Zhua ', # 0xbe
'Jian ', # 0xbf
'Gan ', # 0xc0
'Yong ', # 0xc1
'Lei ', # 0xc2
'Kuo ', # 0xc3
'Lu ', # 0xc4
'Shan ', # 0xc5
'Zhuo ', # 0xc6
'Ze ', # 0xc7
'Pu ', # 0xc8
'Chuo ', # 0xc9
'Ji ', # 0xca
'Dang ', # 0xcb
'Suo ', # 0xcc
'Cao ', # 0xcd
'Qing ', # 0xce
'Jing ', # 0xcf
'Huan ', # 0xd0
'Jie ', # 0xd1
'Qin ', # 0xd2
'Kuai ', # 0xd3
'Dan ', # 0xd4
'Xi ', # 0xd5
'Ge ', # 0xd6
'Pi ', # 0xd7
'Bo ', # 0xd8
'Ao ', # 0xd9
'Ju ', # 0xda
'Ye ', # 0xdb
'[?] ', # 0xdc
'Mang ', # 0xdd
'Sou ', # 0xde
'Mi ', # 0xdf
'Ji ', # 0xe0
'Tai ', # 0xe1
'Zhuo ', # 0xe2
'Dao ', # 0xe3
'Xing ', # 0xe4
'Lan ', # 0xe5
'Ca ', # 0xe6
'Ju ', # 0xe7
'Ye ', # 0xe8
'Ru ', # 0xe9
'Ye ', # 0xea
'Ye ', # 0xeb
'Ni ', # 0xec
'Hu ', # 0xed
'Ji ', # 0xee
'Bin ', # 0xef
'Ning ', # 0xf0
'Ge ', # 0xf1
'Zhi ', # 0xf2
'Jie ', # 0xf3
'Kuo ', # 0xf4
'Mo ', # 0xf5
'Jian ', # 0xf6
'Xie ', # 0xf7
'Lie ', # 0xf8
'Tan ', # 0xf9
'Bai ', # 0xfa
'Sou ', # 0xfb
'Lu ', # 0xfc
'Lue ', # 0xfd
'Rao ', # 0xfe
'Zhi ', # 0xff
)
| mit |
Ensembles/ert | python/python/ert/util/stat.py | 2 | 2312 | # Copyright (C) 2011 Statoil ASA, Norway.
#
# The file 'stat.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
from collections import Sequence
from cwrap import PrototypeError
from ert.util import LLSQResultEnum, UtilPrototype, Matrix
quantile = UtilPrototype("double statistics_empirical_quantile(double_vector, double)")
"""@type: (ert.util.DoubleVector, float)->float"""
quantile_sorted = UtilPrototype("double statistics_empirical_quantile(double_vector, double)")
"""@type: (ert.util.DoubleVector, float)->float"""
try:
_polyfit = UtilPrototype("llsq_result_enum matrix_stat_polyfit(matrix, matrix, matrix, matrix)")
except PrototypeError:
_polyfit = None
def polyfit(n, x, y, s=None):
"""
@type n: int
@type x: Matrix or Sequence
@type y: Matrix or Sequence
@type s: Matrix or Sequence or None
@return: tuple
"""
if _polyfit is None:
raise NotImplementedError("Sorry - your ert distribution has been built without lapack support")
if isinstance(x, Matrix):
xm = x
else:
xm = Matrix(len(x), 1)
for i in range(len(x)):
xm[i, 0] = x[i]
if isinstance(y, Matrix):
ym = y
else:
ym = Matrix(len(y), 1)
for i in range(len(y)):
ym[i, 0] = y[i]
if s:
if isinstance(s, Matrix):
sm = s
else:
sm = Matrix(len(s), 1)
for i in range(len(s)):
sm[i, 0] = s[i]
else:
sm = s
beta = Matrix(n, 1)
res = _polyfit(beta, xm, ym, sm)
if not res == LLSQResultEnum.LLSQ_SUCCESS:
raise Exception("Linear Least Squares Estimator failed?")
l = []
for i in range(n):
l.append(beta[i, 0])
return tuple(l)
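# Illustrative usage (numbers are made up); this only shows the calling convention,
# the meaning of the coefficients is defined by the underlying matrix_stat_polyfit routine:
#   beta = polyfit(2, [0.0, 1.0, 2.0, 3.0], [1.0, 3.1, 4.9, 7.2])
#   b0, b1 = beta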
| gpl-3.0 |
samabhi/pstHealth | venv/lib/python2.7/site-packages/django/contrib/redirects/migrations/0001_initial.py | 308 | 1561 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sites', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Redirect',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('site', models.ForeignKey(
to='sites.Site',
to_field='id',
on_delete=models.CASCADE,
verbose_name='site',
)),
('old_path', models.CharField(
help_text=(
"This should be an absolute path, excluding the domain name. Example: '/events/search/'."
), max_length=200, verbose_name='redirect from', db_index=True
)),
('new_path', models.CharField(
help_text="This can be either an absolute path (as above) or a full URL starting with 'http://'.",
max_length=200, verbose_name='redirect to', blank=True
)),
],
options={
'ordering': ('old_path',),
'unique_together': set([('site', 'old_path')]),
'db_table': 'django_redirect',
'verbose_name': 'redirect',
'verbose_name_plural': 'redirects',
},
bases=(models.Model,),
),
]
| mit |
LockScreen/Backend | venv/lib/python2.7/site-packages/botocore/docs/sharedexample.py | 1 | 9129 | # Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import re
import numbers
from botocore.utils import parse_timestamp
from datetime import datetime
class SharedExampleDocumenter(object):
def document_shared_example(self, example, prefix, section,
operation_model):
"""Documents a single shared example based on its definition.
:param example: The model of the example
:param prefix: The prefix to use in the method example.
:param section: The section to write to.
:param operation_model: The model of the operation used in the example
"""
section.style.new_paragraph()
section.write(example.get('description'))
section.style.new_line()
self.document_input(section, example, prefix,
operation_model.input_shape)
self.document_output(section, example, operation_model.output_shape)
def document_input(self, section, example, prefix, shape):
input_section = section.add_new_section('input')
input_section.style.start_codeblock()
if prefix is not None:
input_section.write(prefix)
params = example['input']
comments = example.get('comments')
if comments:
comments = comments.get('input')
param_section = input_section.add_new_section('parameters')
self._document_params(param_section, params, comments, [], shape)
closing_section = input_section.add_new_section('input-close')
closing_section.style.new_line()
closing_section.style.new_line()
closing_section.write('print(response)')
closing_section.style.end_codeblock()
def document_output(self, section, example, shape):
output_section = section.add_new_section('output')
output_section.writeln('Expected Output:')
output_section.style.start_codeblock()
params = example.get('output', {})
# There might not be an output, but we will return metadata anyway
params['ResponseMetadata'] = {"...": "..."}
comments = example.get('comments')
if comments:
comments = comments.get('output')
self._document_dict(output_section, params, comments, [], shape, True)
closing_section = output_section.add_new_section('output-close')
closing_section.style.end_codeblock()
def _document(self, section, value, comments, path, shape):
"""
:param section: The section to add the docs to.
:param value: The input / output values representing the parameters that
are included in the example.
:param comments: The dictionary containing all the comments to be
applied to the example.
:param path: A list describing where the documenter is in traversing the
parameters. This is used to find the equivalent location
in the comments dictionary.
"""
if isinstance(value, dict):
self._document_dict(section, value, comments, path, shape)
elif isinstance(value, list):
self._document_list(section, value, comments, path, shape)
elif isinstance(value, numbers.Number):
self._document_number(section, value, path)
elif shape and shape.type_name == 'timestamp':
self._document_datetime(section, value, path)
else:
self._document_str(section, value, path)
def _document_dict(self, section, value, comments, path, shape,
top_level=False):
dict_section = section.add_new_section('dict-value')
self._start_nested_value(dict_section, '{')
for key, val in value.items():
path.append('.%s' % key)
item_section = dict_section.add_new_section(key)
item_section.style.new_line()
item_comment = self._get_comment(path, comments)
if item_comment:
item_section.write(item_comment)
item_section.style.new_line()
item_section.write("'%s': " % key)
# Shape could be none if there is no output besides ResponseMetadata
item_shape = None
if shape:
if shape.type_name == 'structure':
item_shape = shape.members.get(key)
elif shape.type_name == 'map':
item_shape = shape.value
self._document(item_section, val, comments, path, item_shape)
path.pop()
dict_section_end = dict_section.add_new_section('ending-brace')
self._end_nested_value(dict_section_end, '}')
if not top_level:
dict_section_end.write(',')
def _document_params(self, section, value, comments, path, shape):
param_section = section.add_new_section('param-values')
self._start_nested_value(param_section, '(')
for key, val in value.items():
path.append('.%s' % key)
item_section = param_section.add_new_section(key)
item_section.style.new_line()
item_comment = self._get_comment(path, comments)
if item_comment:
item_section.write(item_comment)
item_section.style.new_line()
item_section.write(key + '=')
# Shape could be none if there are no input parameters
item_shape = None
if shape:
item_shape = shape.members.get(key)
self._document(item_section, val, comments, path, item_shape)
path.pop()
param_section_end = param_section.add_new_section('ending-parenthesis')
self._end_nested_value(param_section_end, ')')
def _document_list(self, section, value, comments, path, shape):
list_section = section.add_new_section('list-section')
self._start_nested_value(list_section, '[')
item_shape = shape.member
for index, val in enumerate(value):
item_section = list_section.add_new_section(index)
item_section.style.new_line()
path.append('[%s]' % index)
item_comment = self._get_comment(path, comments)
if item_comment:
item_section.write(item_comment)
item_section.style.new_line()
self._document(item_section, val, comments, path, item_shape)
path.pop()
list_section_end = list_section.add_new_section('ending-bracket')
self._end_nested_value(list_section_end, '],')
def _document_str(self, section, value, path):
# We do the string conversion because this might accept a type that
# we don't specifically address.
section.write("'%s'," % str(value))
def _document_number(self, section, value, path):
section.write("%s," % str(value))
def _document_datetime(self, section, value, path):
datetime_tuple = parse_timestamp(value).timetuple()
datetime_str = str(datetime_tuple[0])
for i in range(1, len(datetime_tuple)):
datetime_str += ", " + str(datetime_tuple[i])
section.write("datetime(%s)," % datetime_str)
def _get_comment(self, path, comments):
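        # path entries look like '.Tags' or '[0]', so ''.join(path) yields e.g. '.Tags[0].Key';
        # the leading dot is stripped before the key is looked up in the comments dict.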
        key = re.sub(r'^\.', '', ''.join(path))
if comments and key in comments:
return '# ' + comments[key]
else:
return ''
def _start_nested_value(self, section, start):
section.write(start)
section.style.indent()
section.style.indent()
def _end_nested_value(self, section, end):
section.style.dedent()
section.style.dedent()
section.style.new_line()
section.write(end)
def document_shared_examples(section, operation_model, example_prefix,
shared_examples):
"""Documents the shared examples
:param section: The section to write to.
:param operation_model: The model of the operation.
:param example_prefix: The prefix to use in the method example.
:param shared_examples: The shared JSON examples from the model.
"""
container_section = section.add_new_section('shared-examples')
container_section.style.new_paragraph()
container_section.style.bold('Examples')
documenter = SharedExampleDocumenter()
for example in shared_examples:
documenter.document_shared_example(
example=example,
section=container_section.add_new_section(example['id']),
prefix=example_prefix,
operation_model=operation_model
)
| mit |
ElDeveloper/qiime | tests/test_parallel/test_identify_chimeric_seqs.py | 15 | 72170 | #!/usr/bin/env python
from __future__ import division
__author__ = "Jai Ram Rideout"
__copyright__ = "Copyright 2012, The QIIME project"
__credits__ = ["Jai Ram Rideout"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Jai Ram Rideout"
__email__ = "[email protected]"
from shutil import rmtree
from glob import glob
from os import getenv
from os.path import basename, exists, join, isfile, getsize
from tempfile import NamedTemporaryFile, mkdtemp
from unittest import TestCase, main
from skbio.util import remove_files
from qiime.util import (get_qiime_temp_dir, load_qiime_config)
from qiime.test import initiate_timeout, disable_timeout
from qiime.parse import fields_to_dict
from qiime.parallel.identify_chimeric_seqs import ParallelChimericSequenceIdentifier
class ParallelChimericSequenceIdentifierTests(TestCase):
def setUp(self):
""" """
self.dirs_to_remove = []
tmp_dir = get_qiime_temp_dir()
self.test_out = mkdtemp(dir=tmp_dir,
prefix='qiime_parallel_chimeric_sequence_identifier_tests_',
suffix='')
self.dirs_to_remove.append(self.test_out)
self.in_seqs_f = NamedTemporaryFile(
prefix='qiime_parallel_chimeric_sequence_identifier_tests_input',
suffix='.fasta', dir=tmp_dir)
self.in_seqs_f.write(in_seqs)
self.in_seqs_f.seek(0)
self.reference_seqs_f = NamedTemporaryFile(
prefix='qiime_parallel_chimeric_sequence_identifier_tests_refs',
suffix='.fasta', dir=tmp_dir)
self.reference_seqs_f.write(ref_seqs)
self.reference_seqs_f.seek(0)
self.id_to_tax_f = NamedTemporaryFile(
prefix='qiime_parallel_chimeric_sequence_identifier_tests_id_tax',
suffix='.txt', dir=tmp_dir)
self.id_to_tax_f.write(id_to_tax)
self.id_to_tax_f.seek(0)
self.in_seqs_aligned_f = NamedTemporaryFile(
prefix='qiime_parallel_chimeric_sequence_identifier_tests_aligned',
suffix='.fasta', dir=tmp_dir)
self.in_seqs_aligned_f.write(in_seqs_aligned)
self.in_seqs_aligned_f.seek(0)
initiate_timeout(180)
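        # setUp leaves four temporary fixtures behind: the query sequences, an
        # unaligned reference set, an id-to-taxonomy map, and a pre-aligned copy of
        # the query sequences (used by the ChimeraSlayer test), plus a 180-second
        # watchdog armed via initiate_timeout.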
def tearDown(self):
""" """
disable_timeout()
for d in self.dirs_to_remove:
if exists(d):
rmtree(d)
def test_parallel_chimeric_sequence_identifier_blast_fragments(self):
"""Test ParallelChimericSequenceIdentifier using blast_fragments."""
params = {
'id_to_taxonomy_fp': self.id_to_tax_f.name,
'reference_seqs_fp': self.reference_seqs_f.name,
'chimera_detection_method': 'blast_fragments',
'num_fragments': 3,
'taxonomy_depth': 4,
'max_e_value': 1e-30,
'min_div_ratio': None,
'output_fp': self.test_out + '/blast_fragments_out.txt'
}
app = ParallelChimericSequenceIdentifier()
r = app(self.in_seqs_f.name,
self.test_out,
params,
job_prefix='CHIMTEST',
poll_directly=True,
suppress_submit_jobs=False)
# We should get an empty file.
results = [line for line in open(join(self.test_out,
'blast_fragments_out.txt'), 'U')]
self.assertEqual(results, [])
def test_parallel_chimeric_sequence_identifier_chimera_slayer(self):
"""Test ParallelChimericSequenceIdentifier using ChimeraSlayer."""
qiime_config = load_qiime_config()
params = {
'reference_seqs_fp': None,
'aligned_reference_seqs_fp': qiime_config['pynast_template_alignment_fp'],
'chimera_detection_method': 'ChimeraSlayer',
'num_fragments': 3,
'taxonomy_depth': 4,
'max_e_value': 1e-30,
'min_div_ratio': None,
'output_fp': self.test_out + '/ChimeraSlayer_out.txt'
}
app = ParallelChimericSequenceIdentifier()
r = app(self.in_seqs_aligned_f.name,
self.test_out,
params,
job_prefix='CHIMTEST',
poll_directly=True,
suppress_submit_jobs=False)
output_filepath = join(self.test_out, 'ChimeraSlayer_out.txt')
self.assertTrue(isfile(output_filepath) and
(getsize(output_filepath) > 0))
# This test data is taken from qiime_test_data.
in_seqs = """
>11472286
GATGAACGCTGGCGGCATGCTTAACACATGCAAGTCGAACGGAACACTTTGTGTTTTGAGTTAATAGTTCGATAGTAGATAGTAAATAGTGAACACTATGAACTAGTAAACTATTTAACTAGAAACTCTTAAACGCAGAGCGTTTAGTGGCGAACGGGTGAGTAATACATTGGTATCTACCTCGGAGAAGGACATAGCCTGCCGAAAGGTGGGGTAATTTCCTATAGTCCCCGCACATATTTGTTCTTAAATCTGTTAAAATGATTATATGTTTTATGTTTATTTGATAAAAAGCAGCAAGACAAATGAGTTTTATATTGGTTATACAGCAGATTTAAAAAATAGAATTAGGTCTCATAATCAGGGAGAAAACAAATCAACTAAATCTAAAATACCTTGGGAATTGGTTTACTATGAAGCCTACAAAAACCAAACATCAGCAAGGGTTAGAGAATCAAAGTTGAAACATTATGGGCAATCATTAACTAGACTTAAGAGAAGAATTGGTTTTTGAGAACAAATATGTGCGGGGTAAAGCAGCAATGCGCTCCGAGAGGAACCTCTGTCCTATCAGCTTGTTGGTAAGGTAATGGCTTACCAAGGCGACGACGGGTAGCTGGTGTGAGAGCACGACCAGCCACACTGGGACTGAGACACGGCCCAGACTCCTACGGGAGGCAGCAGTGAGGAATTTTCCACAATGGGCGCAAGCCTGATGGAGCAATGCCGCGTGAAGGATGAAGATTTTCGGATTGTAAACTTCTTTTAAGTAGGAAGATTATGACGGTACTACTTGAATAAGCATCGGCTAACTACGTGCCAGCAGCCGCGGTAATACGTAGGATGCAAGCGTTATCCGGAATTACTGGGCGTAAAGCGTGTGTAGGTGGTTTATTAAGTTAAATGTTAAATTTTCAGGCTTAACTTGGAAACCGCATTTAATACTGGTAGACTTTGAGGACAAGAGAGGCAGGCGGAATTAGCGGAGTAGCGGTGAAATGCGTAGATATCGCTAAGAACACCAATGGCGAAGGCAGCCTGCTGGTTTGCACCTGACACTGAGATACGAAAGCGTGGGGAGCGAACGGGATTAGATACCCCGGTAGTCCACGCCGTAAACGATGGTCACTAGCTGTTAGGGGCTCGACCCCTTTAGTAGCGAAGCTAACGCGTTAAGTGACCCGCCTGGGGAGTACGATCGCAAGATTAAAACTCAAAGGAATTGACGGGGACCCGCACAAGCGGTGGAACGTGAGGTTTAATTCGTCTCTAAGCGAAAAACCTTACCGAGGCTTGACATCTCCGGAAGACCTTAGAAATAAGGTTGTGCCCGAAAGGGAGCCGGATGACAGGTGCTGCATGGCTGTCGTCAGCTCGTGTTGTGAAATGTTCGGTTAAGTCCGTTAACGAGCGCAACCCTTGCTGTGTGTTGTATTTTTCACACAGGACTATCCTGGTCAACAGGGAGGAAGGTGGGGATGACGTCAAGTCAGCATGGCTCTTACGCCTCGGGCTACACTCGCGTTACAATGGCCGGTACAATGGGCTGCCAACTCGTAAGGGGGAGCTAATCCCATCAAAACCGGTCCCAGTTCGGATTGAGGGCTGCAATTCGCCCTCATGAAGTCGGAATCGCTAGTAACCGCGAATCAGCACGTCGCGGTGAATGCGTTCTCGGGTCTTGTACACACTGCCCGTCACACCACGAAAGTTAGTAACGCCCGAAGTGCCCTGTATGGGGTCCTAAGGTGGGGCTAGCGATTGGGGTG
>11472384
AGAGTTTGATCCTGGCTCAGATTGAACGCTGGCGGCATGCCTTACACATGCAAGTCGAACGGCAGCACGGGGGCAACCCTGGTGGCGAGTGGCGAACGGGTGAGTAATACATCGGAACGTGTCCTGTAGTGGGGGATAGCCCGGCGAAAGCCGGATTAATACCGCATACGCTCTACGGAGGAAAGGGGGGGATCTTAGGACCTCCCGCTACAGGGGCGGCCGATGGCAGATTAGCTAGTTGGTGGGGTAAAGGCCTACCAAGGCGACGATCTGTAGCTGGTCTGAGAGGACGACCAGCCACACTGGGACTGAGACACGGCCCAGACTCCTACGGGAGGCAGCAGTGGGGAATTTTGGACAATGGGGGCAACCCTGATCCAGCAATGCCGCGTGTGTGAAGAAGGCCTTCGGGTTGTAAAGCACTTTTGTCCGGAAAGAAAACGCCGTGGTTAATACCCGTGGCGGATGACGGTACCGGAAGAATAAGCACCGGCTAACTACGTGCCAGCAGCCGCGGTAATACGTAGGGTGCAAGCGTTAATCGGAATTACTGGGCGTAAAGCGTGCGCAGGCGGTCCGCTAAGACAGATGTGAAATCCCCGGGCTTAACCTGGGAACTGCATTTGTGACTGGCGGGCTAGAGTATGGCAGAGGGGGGTAGAATTCCACGTGTAGCAGTGAAATGCGTAGAGATGTGGAGGAATACCGATGGCGAAGGCAGCCCCCTGGGCCAATACTGACGCTCATGCACGAAAGCGTGGGGAGCAAACAGGATTAGATACCCTGGTAGTCCACGCCCTAAACGATGTCAACTAGTTGTCGGGTCTTCATTGACTTGGTAACGTAGCTAACGCGTGAAGTTGACCGCCTGGGGAGTACGGTCGCAAGATTAAAACTCAAAGGAATTGACGGGGACCCGCACAAGCGGTGGATGATGTGGATTAATTCGATGCAACGCGAAAAACCTTACCTACCCTTGACATGTATGGAATCCTGCTGAGAGGTGGGAGTGCCCGAAAGGGAGCCATAACACAGGTGCTGCATGGCTGTCGTCAGCTCGTGTCGTGAGATGTTGGGTTAAGTCCCGCAACGAGCGCAACCCTTGTCCCTAGTTGCTACGCAAGAGCACTCTAGGGAGACTGCCGGTGACAAACCGGAGGAAGGTGGGGATGACGTCAAGTCCTCATGGCCCTTATGGGTAGGGCTTCACACGTCATACAATGGTCGGAACAGAGGGTCGCCAACCCGCGAGGGGGAGCCAATCCCAGAAAACCGATCGTAGTCCGGATCGCACTCTGCAACTCGAGTGCGTGAAGCTGGAATCGCTAGTAATCGCGGATCAGCATGCCGCGGTGAATACGTTCCCGGGTCTTGTACACACCGCCCGTCACACCATGGGAGTGGGTTTTACCAGAAGTGGCTAGTCTAACCGCAAGGAGGACGGTCACCACGGTAGGATTCATGACTGGGGTGAAGTCGTAACAAGGTAGCCGTATCGGAAGGTGCGGCTGGATCACCTCCTTTCTCGAGCGAACGTGTCGAACGTTGAGCGCTCACGCTTATCGGCTGTGAAATTAGGACAGTAAGTCAGACAGACTGAGGGGTCTGTAGCTCAGTCGGTTAGAGCACCGTCTTGATAAGGCGGGGGTCGATGGTTCGAATCCATCCAGACCCACCATTGTCT
>11468680
TAAACTGAAGAGTTTGATCCTGGCTCAGATTGAACGCTGGCGGCATGCCTTACACATGCAAGTCGAACGGCAGCACGGGTGCTTGCACCTGGTGGCGAGTGGCGAACGGGTGAGTAATACATCGGAACATGTCCTGTAGTGGGGGATAGCCCGGCGAAAGCCGGATTAATACCGCATACGATCTACGGATGAAAGCGGGGGACCTTCGGGCCTCGCGCTATAGGGTTGGCCGATGGCTGATTAGCTAGTTGGTGGGGTAAAGGCCTACCAAGGCGACGATCAGTAGCTGGTCTGAGAGGACGACCAGCCACACTGGGACTGAGACACGGCCCAGACTCCTACGGGAGGCAGCAGTGGGGAATTTTGGACAATGGGCGAAAGCCTGATCCAGCAATGCCGCGTGTGTGAAGAAGGCCTTCGGGTTGTAAAGCACTTTTGTCCGGAAAGAAATCCTTGGCTCTAATACAGTCGGGGGATGACGGTACCGGAAGAATAAGCACCGGCTAACTACGTGCCAGCAGCCGCGGTAATACGTAGGGTGCGAGCGTTAATCGGAATTACTGGGCGTAAAGCGTGCGCAGGCGGTTTGCTAAGACCGATGTGAAATCCCCGGGCTCAACCTGGGAACTGCATTGGTGACTGGCAGGCTAGAGTATGGCAGAGGGGGGTAGAATTCCACGTGTAGCAGTGAAATGCGTAGAGATGTGGAGGAATACCGATGGCGAAGGCAGCCCCCTGGGCCAATACTGACGCTCATGCACGAAAGCGTGGGGAGCAAACAGGATTAGATACCCTGGTAGTCCACGCCCTAAACGATGTCAACTAGTTGTTGGGGATTCATTTCCTTAGTAACGTAGCTAACGCGTGAAGTTGACCGCCTGGGGAGTACGGTCGCAAGATTAAAACTCAAAGGAATTGACGGGGACCCGCACAAGCGGTGGATGATGTGGATTAATTCGATGCAACGCGAAAAACCTTACCTACCCTTGACATGGTCGGAATCCCGCTGAGAGGTGGGAGTGCTCGAAAGAGAACCGGCGCACAGGTGCTGCATGGCTGTCGTCAGCTCGTGTCGTGAGATGTTGGGTTAAGTCCCGCAACGAGCGCAACCCTTGTCCTTAGTTGCTACGCAAGAGCACTCTAAGGAGACTGCCGGTGACAAACCGGAGGAAGGTGGGGATGACGTCAAGTCCTCATGGCCCTTATGGGTAGGGCTTCACACGTCATACAATGGTCGGAACAGAGGGTTGCCAACCCGCGAGGGGGAGCTAATCCCAGAAAACCGATCGTAGTCCGGATTGCACTCTGCAACTCGAGTGCATGAAGCTGGAATCGCTAGTAATCGCGGATCAGCATGCCGCGGTGAATACGTTCCCGGGTCTTGTACACACCGCCCGTCACACCATGGGAGTGGGTTTTACCAGAAGTGGCTAGTCTAACCGCAAGGAGGACGGTCACCACGGTAGGATTCATGACTGGGGTGAAGTCGTAACAAGGTAGCCGTATCGGAAGGTGCGGCTGGATCACCTCCTTTCCAGAGCTATCTCGCAAAGTTGAGCGCTCACGCTTATCGGCTGTAAATTTAAAGACAGACTCAGGGGTCTGTAGCTCAGTCGGTTAGAGCACCGTCTTGATAAGGCGGGGGTCGTTGGTTCGAATCCAACCAGACCCACCATTGTCTG
>11458037
GACGAACGCTGGCGGCGTGCCTAACACATGCAAGTCGAACGGTTTCGAAGATCGGACTTCGAATTTCGAATTTCGATCATCGAGATAGTGGCGGACGGGTGAGTAACGCGTGGGTAACCTACCCATAAAGCCGGGACAACCCTTGGAAACGAGGGCTAATACCGGATAAGCTTGAGAAGTGGCATCACTTTTTAAGGAAAGGTGGCCGATGAGAATGCTGCCGATTATGGATGGACCCGCGTCTGATTAGCTGGTTGGTGGGGTAAAGGCCTACCAAGGCGACGATCAGTAGCCGGCCTGAGAGGGTGAACGGCCACACTGGGACTGAGACACGGCCCAGACTCCTACGGGAGGCAGCAGTGGGGAATCTTCCGCAATGGACGAAAGTCTGACGGAGCAACGCCGCGTGTATGATGAAGGTTTTCGGATTGTAAAGTACTGTCTATGGGGAAGAATGGTGTGCTTGAGAATATTAAGTACAAATGACGGTACCCAAGGAGGAAGCCCCGGCTAACTACGTGCCAGCAGCCGCGGTAATACGTAGGGGGCAAGCGTTGTCCGGAATTATTGGGCGTAAAGGGCGCGTAGGCGGATAGTTAAGTCCGGTGTGAAAGATCAGGGCTCAACCCTGAGAGTGCATCGGAAACTGGGTATCTTGAGGACAGGAGAGGAAAGTGGAATTCCACGTGTAGCGGTGAAATGCGTAGATATGTGGAGGAACACCAGTGGCGAAGGCGACTTTCTGGACTGTAACTGACGCTGAGGCGCGAAAGCGTGGGGAGCAAACAGGATTAGATACCCTGGTAGTCCACGCTGTAAACGATGAGTGCTAGGTGTAGAGGGTATCGACCCCTTCTGTGCCGCAGTTAACACAATAAGCACTCCGCCTGGGGAGTACGGCCGCAAGGTTGAAACTCAAAGGAATTGACGGGGGCCCGCACAAGCGGTGGAGCATGTGGTTTAATTCGACGCAACGCGAAGAACCTTACCAGGGCTTGACATCCTCTGAACTTGCTGGAAACAGGAAGGTGCCCTTCGGGGAGCAGAGAGACAGGTGGTGCATGGTTGTCGTCAGCTCGTGTCGTGAGATGTTGGGTTAAATCCCGCAACGAGCGCAACCCCTGTATTTAGTTGCTAACGCGTAGAGGCGAGCACTCTGGATAGACTGCCGGTGATAAACCGGAGGAAGGTGGGGATGACGTCAAATCATCATGCCCCTTATGTTCTGGGCTACACACGTGCTACAATGGCCGGTACAGACGGAAGCGAAGCCGCGAGGCGGAGCAAATCCGAGAAAGCCGGTCTCAGTTCGGATTGCAGGCTGCAACTCGCCTGCATGAAGTCGGAATCGCTAGTAATCGCAGGTCAGCATACTGCGGTGAATACGTTCCCGGGCCTTGTACACACCGCCCGTCACACCACGAAAGTCTGCAACACCCGAAGCCGGTGAGGTAACCGACTCGAGATTCGAGGCTCGAAGTTCGAGGATCGAAGTGTAAGCGAAATTAATAAGTCTTAGTAAAGCTAAAAAGCATTAAGACCGATAAGATGATCTTGCAATCGAACATCGAACATCGAATTTCGAACCTCGAGTTGGAGCTAGCCGTCGAAGGTGGGGCCGATAATTGGGGTG
>11469739
AGAGTTTGATCCTGGCTCAGGATGAACGCTGGCGGCGTGCCTAACACATGCAAGTCGAACGAGAAGCTAACTTCTGATTCCTTCGGGATGATGAGGTTAGCAGAAAGTGGCGAACGGGTGAGTAACGCGTGGGTAATCTACCCTGTAAGTGGGGGATAACCCTCCGAAAGGAGGGCTAATACCGCATAATATCTTTATCCCAAAAGAGGTAAAGATTAAAGATGGCCTCTATACTATGCTATCGCTTCAGGATGAGTCCGCGTCCTATTAGTTAGTTGGTGGGGTAATGGCCTACCAAGACGACAATGGGTAGCCGGTCTGAGAGGATGTACGGCCACACTGGGACTGAGATACGGCCCAGACTCCTACGGGAGACAGCAGTGGGGAATATTGCGCAATGGGGGAAACCCTGACGCAGCGACGCCGCGTGGATGATGAAGGCCCTTGGGTTGTAAAATCCTGTTCTGGGGGAAGAAAGCTTAAAGGTCCAATAAACCCTTAAGCCTGACGGTACCCCAAGAGAAAGCTCCGGCTAATTATGTGCCAGCAGCCGCGGTAATACATAAGGAGCAAGCGTTATCCGGAATTATTGGGCGTAAAGAGCTCGTAGGCGGTCTTAAAAGTCAGTTGTGAAATTATCAGGCTCAACCTGATAAGGTCATCTGAAACTCTAAGACTTGAGGTTAGAAGAGGAAAGTGGAATTCCCGGTGTAGCGGTGAAATGCGTAGATATCGGGAGGAACACCAGTGGCGAAGGCGGCTTTCTGGTCTATCTCTGACGCTGAGGAGCGAAAGCTAGGGGAGCAAACGGGATTAGATACCCCGGTAGTCCTAGCTGTAAACGATGGATACTAGGTGTGGGAGGTATCGACCCCTTCTGTGCCGTAGCTAACGCATTAAGTATCCCGCCTGGGGAGTACGGTCGCAAGGCTGAAACTCAAAGGAATTGACGGGGGCCCGCACAAGCGGTGGAGCATGTGGTTTAATTCGACGCAACGCGAAGAACCTTACCGGGACTTGACATTATCTTGCCCGTCTAAGAAATTAGATCTTCTTCCTTTGGAAGACAGGATAACAGGTGGTGCATGGTTGTCGTCAGCTCGTGTCGTGAGATGTTGGGTTAAGTCCCACAACGAGCGCAACCCTTGTGCTTAGTTGCTAACTTGTTTTACAAGTGCACTCTAGGCAGACTGCCGCAGATAATGCGGAGGAAGGTGGGGATGACGTCAAATCATCATGCCCCTTACGTCCCGGGCTACACACGTGCTACAATGGCCTGTACAGAGGGTAGCGAAAGAGCGATCTTAAGCCAATCCCAAAAAGCAGGCCCCAGTTCGGATTGGAGGCTGCAACTCGCCTCCATGAAGTAGGAATCGCTAGTAATCGCGGATCAGCATGCCGCGGTGAATACGTTCCCGGGCCTTGTACACACCGCCCGTCACACCACGAAAGTTGGCGATACCTGAAGTTACTAGGCTAACCTGGCACTCAACTAAGTTCACTAACTTATTTGCTTAAAATAAGGCTTAATGTGCTTAGTTGAGTGCCGGGAGGCAGGTACCGAAGGTATGGCTGGCGATTGGGGTGAAGTCGTAACAAGGTGGAAA
>11469752
AGAGTTTGATCCTGGCTCAGGATGAACGCTGGCGGCGTGCCTAATACATGCAAGTCGAGCGGCAGCGAGTTCCTCACCGAGGTTCGGAACAGTTGACAGTAAACAGTTGACAGTAAACAGTAACTTCAGAAATGAAGCGGACTGTGAACTGTTTACTGTAACCTGTTAGCTATTATTTCGAGCTTTAGTGAGGAATGTCGGCGAGCGGCGGACGGCTGAGTAACGCGTAGGAACGTACCCCAAACTGAGGGATAAGCACCAGAAATGGTGTCTAATACCGCATATGGCCCAGCACCTTTTTTAATCAACCACGACCCTAAAATCGTGAATAATTGGTAGGAAAAGGTGTTGGGTTAAAGCTTCGGCGGTTTGGGAACGGCCTGCGTATGATTAGCTTGTTGGTGAGGTAAAAGCTCACCAAGGCGACGATCATTAGCTGGTCTGAGAGGATGATCAGCCAGACTGGGACTGAGACACGGCCCAGACTCCTACGGGAGGCAGCAGTAGGGAATCTTCCACAATGGGCGAAAGCCTGATGGAGCAACGCCGTGTGCAGGATGAAAGCCTTCGGGTCGTAAACTGCTTTTATATGTGAAGACTTCGACGGTAGCATATGAATAAGGATCGGCTAACTCCGTGCCAGCAGCCGCGGTCATACGGAGGATCCAAGCGTTATCCGGAATTACTGGGCGTAAAGAGTTGCGTAGGTGGCATAGTAAGTTGGTAGTGAAATTGTGTGGCTCAACCATACACCCATTACTAAAACTGCTAAGCTAGAGTATATGAGAGGTAGCTGGAATTCCTAGTGTAGGAGTGAAATCCGTANATATTAGGAGGAACACCGATGGCGTAGGCAGGCTACTGGCATATTACTGACACTAAGGCACGAAAGCGTGGGGAGCGAACGGGATTAGATACCCCGGTAGTCCACGCTGTAAACGATGGATGCTAGCTGTTATGAGTATCGACCCTTGTAGTAGCGAAGCTAACGCGTTAAGCATCCCGCCTGTGGAGTACGAGCGCAAGCTTAAAACATAAAGGAATTGACGGGGACCCGCACAAGCGGTGGAGCGTGTTGTTTAATTCGATGATAAGCGAAGAACCTTACCAAGGCTTGACATCCCTGGAATTTCTCCGAAAGGAGAGAGTGCCTTCGGGAATCAGGTGACAGGTGATGCATGGCCGTCGTCAGCTCGTGTCGTGAGATGTTTGGTTAAGTCCATTAACGAGCGCAACCCTTGTAAATAGTTGGATTTTTCTATTTAGACTGCCTCGGTAACGGGGAGGAAGGAGGGGATGATGTCAGGTCAGTATTTCTCTTACGCCTTGGGCTACAAACACGCTACAATGGCCGGTACAAAGGGCAGCCAACCCGCGAGGGGGAGCAAATCCCATCAAAGCCGGTCTCAGTTCGGATAGCAGGCTGAAATTCGCCTGCTTGAAGTCGGAATCGCTAGTAACGGTGAGTCAGCTATATTACCGTGAATACGTTCCCGGGTCTTGTACACACCGCCCGTCAAGGCATGAAAGTCATCAATACCTGACGTCTGGATTTATTCTGGCCTAAGGTAGGGGCGATGATTGGGCCTAAGTCGTAACAAGGTAA
>11460523
AGAGTTTGATCCTGGCTCAGAACGAACGCTGGCGGCGTGCTTAACACATGCAAGTCGAACGCGAAATCGGGCACTCAATTTTGCTTTTCAAACATTAACTGATGAAACGACCAGAGAGATTGTTCCAGTTTAAAGAGTGAAAAGCAGGCTTGAGTGCCTGAGAGTAGAGTGGCGCACGGGTGAGTAACGCGTAAATAATCTACCCCTGCATCTGGGATAACCCACCGAAAGGTGAGCTAATACCGGATACGTTCTTTTAACCGCGAGGTTTTAAGAAGAAAGGTGGCCTCTGATATAAGCTACTGTGCGGGGAGGAGTTTGCGTACCATTAGCTAGTTGGTAGGGTAATGGCCTACCAAGGCATCGATGGTTAGCGGGTCTGAGAGGATGATCCGCCACACTGGAACTGGAACACGGACCAGACTCCTACGGGAGGCAGCAGTGAGGAATATTGCGCAATGGGGGCAACCCTGACGCAGCGACGCCGCGTGGATGATGAAGGCCTTCGGGTCGTAAAATCCTGTCAGATGGAAAGAAGTGTTATATGGATAATACCTGTATAGCTTGACGGTACCATCAAAGGAAGCACCGGCTAACTCCGTGCCAGCAGCCGCGGTAATACGGAGGGTGCAAGCGTTGTTCGGAATTACTGGGCGTAAAGCGCGCGTAGGCGGTCTGTTATGTCAGATGTGAAAGTCCACGGCTCAACCGTGGAAGTGCATTTGAAACTGACAGACTTGAGTACTGGAGGGGGTGGTGGAATTCCCGGTGTAGAGGTGAAATTCGTAGATATCGGGAGGAATACCGGTGGCGAAGGCGACCACCTGGCCAGATACTGACGCTGAGGTGCGAAAGCGTGGGGAGCAAACAGGATTAGATACCCTGGTAGTCCACGCCGTAAACGATGTCAACTAGGTGTTGGGATGGTTAATCGTCTCATTGCCGGAGCTAACGCATTAAGTTGACCGCCTGGGGAGTACGGTCGCAAGATTAAAACTCAAAGGAATTGACGGGGGCCCGCACAAGCGGTGGAGTATGTGGTTTAATTCGACGCAACGCGCAGAACCTTACCTGGTCTTGACATCCCGAGAATCTCAAGGAAACTTGAGAGTGCCTCTTGAGGAACTCGGTGACAGGTGCTGCATGGCTGTCGTCAGCTCGTGTCGTGAGATGTTGGGTTAAGTCCCGCAACGAGCGCAACCCTTGTCTTTAGTTGCCATCATTAAGTTGGGCACTCTAAAGAGACTGCCGGTGTCAAACCGGAGGAAGGTGGGGATGACGTCAAGTCCTCATGGCCTTTATGACCAGGGCTACACACGTACTACAATGGCATAGACAAAGGGCAGCGACATCGCGAGGTGAAGCGAATCCCATAAACCATGTCTCAGTCCGGATTGGAGTCTGCAACTCGACTCCATGAAGTTGGAATCGCTAGTAATCGTAGATCAGCATGCTACGGTGAATACGTTCCCGGGCCTTGTACACACCGCCCGTCACACCACGGGAGTTGGTTGTACCAGAAGCAGTTGAGCGAACTATTCGTAGACGCAGGCTGCCAAGGTATGATTGGTAACTGGGGTGAAGTCGTAACAAGGTAACC
>11460543
TGGTTTGATCCTGGCTCAGGACAAACGCTGGCGGCGTGCCTAACACATGCAAGTCGAACGAGAAGCCAGCTTTTGATTCCTTCGGGATGAGAAAGCAGGTAGAAAGTGGCGAACGGGTGAGTAACGCGTGGGTAATCTACCCTGTAAGTAGGGGATAACCCTCTGAAAAGAGGGCTAATACCGCATAATATCTTTACCCCATAAGAAGTAAAGATTAAAGATGGCCTCTGTATATGCTATCGCTTCAGGATGAGCCCGCGTCCTATTAGTTAGTTGGTAAGGTAATGGCTTACCAAGACCACGATGGGTAGCCGGTCTGAGAGGATGTACGGCCACACTGGGACTGAGATACGGCCCAGACTCCTACGGGAGGCAGCAGTGGGGAATATTGCGCAATGGGGGAAACCCTGACGCAGCGACGCCGCGTGGATGATGAAGGCCTTCGGGTTGTAAAATCCTGTTTTGGGGGACGAAACCTTAAGGGTCCAATAAACCCTTAAATTGACGGTACCCCAAGAGAAAGCTCCGGCTAATTATGTGCCAGCAGCCGCGGTAATACATAAGGAGCAAGCGTTGTCCGGAATTATTGGGCGTAAAGAGTTCGTAGGCGGTCTTAAAAGTCAGGTGTGAAATTATCAGGCTTAACCTGATACGGTCATCTGAAACTTTAAGACTTGAGGTTAGGAGAGGAAAGTGGAATTCCCGGTGTAGCGGTGAAATGCGTAGATATCGGGAGGAACACCAGTGGCGAAGGCGGCTTTCTGGCCTAACTCTGACGCTGAGGAACGAAAGCTAGGGGAGCAAACGGGATTAGATACCCCGGTAGTCCTAGCTGTAAACGATGGATACTAGGTGTGGGAGGTATCGACCCCTTCTGTGCCGWCACTAACGCATTAAGTATCCCGCCTGGGGAGTACGGTCGCAAGGCTAAAACTCAAAGGAATTGACGGGGGCCCGCACAAGCGGTGGAGCATGTGGTTTAATTCGACGCAACGCGAAGAACCTTACCGGGGCTTGACATTGTCTTGCCCGTTTAAGAAATTAAATTTTCTTCCCTTTTAGGGAAGACAAGATAACAGGTGGTGCATGGTTGTCGTCAGCTCGTGTCGTGAGATGTTGGGTTAAGTCCCACAACGAGCGCAACCCTTATTCTTAGTTGCTAGTTTGTTTACAAACGCACTCTAAAGAGACTGCCGCAGATAATGCGGAGGAAGGTGGGGATGACGTCAAATCATCATGCCCCTTACGTCCCGGGCTACACACGTGCTACAATGGCCTGTACAGAGGGTAGCGAAAGAGCGATCTCAAGCTAATCCCTTAAAACAGGTCTCAGTTCGGATTGGAGGCTGCAACTCGCCTCCATGAAGTCGGAATCGCTAGTAATCGCGGATCAGCATGCCGCGGTGAATACGTTCCCGGGCCTTGTACACACCGCCCGTCACACCATGAAAGTTGGCGATACCTGAAGTTACTGTGCTAACCCGGCACTCAACTAAGTACATTAAGTCTTATTTTAAGCTATTGTATTTAGTTGAGTGCCGGGAGGCAGGTACCTAAGGTATGGCTAGCGATTGGGGTGAAGTCGTAACAAGGTAGCCG
>11480235
TGGTTTGATCCTGGCTCAGGATTAACGCTGGCGGCGCGCCTTATACATGCAAGTCGAACGAGCCTTGTGCTTCGCACAAGGAAATTCCAAGCACCAAGCACCAAATCTCAAACAAATCCCAATGACCAAAATTCCAAAAACCTAAACATTTTAAATGTTTAGAATTTGGAAAATTGGAATTTGGAATTTATTTGTTATTTGGAATTTATGATTTGGGATTTTCTCGCGCGGAGANCNTNAGTGGCGAACGGGTGAGTAATACGTTGGTATCTACCCCAAAGTAGAGAATAAGCCCGAGAAATCGGGGTTAATACTCTATGTGTTCGAAAGAACAAAGACTTCGGTTGCTTTGGGAAGAACCTGCGGCCTATCAGCTTGTTGGTAAGGTAACGGCTTACCAAGGCTTTGACGGGTAGCTGGTCTGGGAAGACGACCAGCCACAATGGGACTTAGACACGGCCCATACTCCTACGGGAGGCAGCAGTAGGGAATCTTCGGCAATGCCCGAAAGGTGACCGAGCGACGCCGCGTAGAGGAAGAAGATCTTTGGATTGTAAACTCTTTTTCTCCTAGACAAAGTTCTGATTGTATAGGAGGAATAAGGGGTTTCTAAACTCGTGCCAGCAGAAGCGGTAATACGAGTGCCCCAAGCGTTATCCGGAATCATTGGGCGTAGAGCGTTGTATAGGTGGTTTAAAAAGTCCAAAATTAAATCTTTAGGCTCAACCTAAAATCTGTTTTGGAAACTTTTAGACTTGAATAAAATCGACGSGAGTGGAACTTCCAGAGTAGGGGTTACATCCGTTGATACTGGAAGGAACGCCGAAGGCGAAAGCAACTCGCGAGATTTTATTGACGCCGCGTACACGAAAGCGTGGGGAGCGAAAAGTATTAGATACACTTGTAGTCCACGCCGTAAACTATGGATACTAGCAATTTGAAGCTTCGACCCTTCAAGTTGCGGACTAACGCGTTAAGTATCTCGCCTGGGAAGTACGGCCGCAAGGCTAAAACTCAAAGGAATAGACGGGGGCCCGCACAAGCGGTGGAGCATGTGGTTTAATTCGACGATAAGCGTGGAACCTTACCAGGGCTTAGACGTACAGAGAATTCCTTGGAAACAAGGAAGTGCTTCGGGAACTCTGTACTCAGGTACTGCATGGCTGTCGTCAGTATGTACTGTGAAGCACTCCCTTAATTGGGGCAACATACGCAACCCCTATCCTAAGTTAGAAATGTCTTAGGAAACCGCTTCGATTCATCGGAGAGGAAGATGGGGACGACGTCAAGTCAGCATGGTCCTTGATGTCCTGGGCGACACACGTGCTACAATGGCTAGTATAACGGGATGCGTAGGTGCGAACCGAAGCTAATCCTTAAAAAACTAGTCTAAGTTCGGATTGAAGTCTGCAACTCGACTTCATGAAGCCGGAATCGCTAGTAACCGCAAATCAGCCACGTTGCGGTGAATACGTTCTCGGGCCTTGTACTCACTGCCCGTCACGTCAAAAAAGTCGGTAATACCCGAAGCACCCTTTTAAAGGGTTCTAAGGTAGGACCGATGATTGGGACGAAGTCGTAACAAGGTAGCCG
>11480408
AATTTAGCGGCCGCGAATTCGCCCTTGAGTTTGATCCTGGCTCAGGACGAACGCTGGCGGCGTGCTTAACACATGCAAGTCGAACGGGGATATCCGAGCGGAAGGTTTCGGCCGGAAGGTTGGGTATTCGAGTGGCGGACGGGTGAGTAACGCGTGAGCAATCTGTCCCGGACAGGGGGATAACACTTGGAAACAGGTGCTAATACCGCATAAGACCACAGCATCGCATGGTGCAGGGGTAAAAGGAGCGATCCGGTCTGGGGTGAGCTCGCGTCCGATTAGATAGTTGGTGAGGTAACGGCCCACCAAGTCAACGATCGGTAGCCGACCTGAGAGGGTGATCGGCCACATTGGAACTGAGAGACGGTCCAAACTCCTACGGGAGGCAGCAGTGGGGAATATTGGGCAATGGGCGAAAGCCTGACCCAGCAACGCCGCGTGAGTGAAGAAGGCCTTCGGGTTGTAAAGCTCTGTTATGCGAGACGAAGGAAGTGACGGTATCGCATAAGGAAGCCCCGGCTAACTACGTGCCAGCAGCCGCGGTAATACGTAGGGGGCGAGCGTTGTCCGGAATGACTGGGCGTAAAGGGCGTGTAGGCGGCCGTTTAAGTATGGAGTGAAAGTCCATTTTTCAAGGATGGAATTGCTTTGTAGACTGGATGGCTTGAGTGCGGAAGAGGTAAGTGGAATTCCCAGTGTAGCGGTGAAATGCGTAGAGATTGGGAGGAACACCAGTGGCGAAGGCGACTTACTGGGCCGTAACTGACGCTGAGGCGCGAAAGCGTGGGGAGCGAACAGGATTAGATACCCTGGTAGTCCACGCGGTAAACGATGAATGCTAGGTGTTGCGGGTATCGACCCCTGCAGTGCCGGAGTAAACACAATAAGCATTCCGCCTGGGGAGTACGGCCGCAAGGTTGAAACTCAAGGGAATTGACGGGGGCCCGCACAAGCAGCGGAGCATGTTGTTTAATTCGAAGCAACGCGAAGAACCTTACCAGGTCTTGACATCCAGTTAAGCTCATAGAGATATGAGGTCCCTTCGGGGGAACTGAGACAGGTGGTGCATGGTTGTCGTCAGCTCGTGTCGTGAGATGTTGGGTTAAGTCCCGCAACGAGCGCAACCCTTATGGTCAGTTACTAACGCGTGAAGGCGAGGACTCTGACGAGACTGCCGGGGACAACTCGGAGGAAGGTGGGGACGACGTCAAATCATCATGCCCCTTATGACCTGGGCTACAAACGTGCTACAATGGTGACTACAAAGAGGAGCGAGACTGTAAAGTGGAGCGGATCTCAAAAAAGTCATCCCAGTTCGGATTGTGGGCTGCAACCCGCCCACATGAAGTTGGAGTTGCTAGTAATCGCGGATCAGCATGCCGCGGTGAATACGTTCCCGGGCCTTGTACACACCGCCCGTCACACCATGGGAGTTGGGAGCACCCGAAGTCAGTGAGGTAACCGGAAGGAGCCAGCTGCCGAAGGTGAGACCGATGACTGGGGTGAAGTCGTAACAAGGTAGCCGTATCGGAAGGTGCGGCTGGATCACCTCCTTAAGGGCGAATTCGTTTAAACCTGCAGGACTAG
"""
ref_seqs = """
>AY800210
TTCCGGTTGATCCTGCCGGACCCGACTGCTATCCGGATGCGACTAAGCCATGCTAGTCTAACGGATCTTCGGATCCGTGGCATACCGCTCTGTAACACGTAGATAACCTACCCTGAGGTCGGGGAAACTCCCGGGAAACTGGGCCTAATCCCCGATAGATAATTTGTACTGGAATGTCTTTTTATTGAAACCTCCGAGGCCTCAGGATGGGTCTGCGCCAGATTATGGTCGTAGGTGGGGTAACGGCCCACCTAGCCTTTGATCTGTACCGGACATGAGAGTGTGTGCCGGGAGATGGCCACTGAGACAAGGGGCCAGGCCCTACGGGGCGCAGCAGGCGCGAAAACTTCACAATGCCCGCAAGGGTGATGAGGGTATCCGAGTGCTACCTTAGCCGGTAGCTTTTATTCAGTGTAAATAGCTAGATGAATAAGGGGAGGGCAAGGCTGGTGCCAGCCGCCGCGGTAAAACCAGCTCCCGAGTGGTCGGGATTTTTATTGGGCCTAAAGCGTCCGTAGCCGGGCGTGCAAGTCATTGGTTAAATATCGGGTCTTAAGCCCGAACCTGCTAGTGATACTACACGCCTTGGGACCGGAAGAGGCAAATGGTACGTTGAGGGTAGGGGTGAAATCCTGTAATCCCCAACGGACCACCGGTGGCGAAGCTTGTTCAGTCATGAACAACTCTACACAAGGCGATTTGCTGGGACGGATCCGACGGTGAGGGACGAAACCCAGGGGAGCGAGCGGGATTAGATACCCCGGTAGTCCTGGGCGTAAACGATGCGAACTAGGTGTTGGCGGAGCCACGAGCTCTGTCGGTGCCGAAGCGAAGGCGTTAAGTTCGCCGCCAGGGGAGTACGGCCGCAAGGCTGAAACTTAAAGGAATTGGCGGGGGAGCAC
>EU883771
TGGCGTACGGCTCAGTAACACGTGGATAACTTACCCTTAGGACTGGGATAACTCTGGGAAACTGGGGATAATACTGGATATTAGGCTATGCCTGGAATGGTTTGCCTTTGAAATGTTTTTTTTCGCCTAAGGATAGGTCTGCGGCTGATTAGGTCGTTGGTGGGGTAATGGCCCACCAAGCCGATGATCGGTACGGGTTGTGAGAGCAAGGGCCCGGAGATGGAACCTGAGACAAGGTTCCAGACCCTACGGGGTGCAGCAGGCGCGAAACCTCCGCAATGTACGAAAGTGCGACGGGGGGATCCCAAGTGTTATGCTTTTTTGTATGACTTTTCATTAGTGTAAAAAGCTTTTAGAATAAGAGCTGGGCAAGACCGGTGCCAGCCGCCGCGGTAACACCGGCAGCTCGAGTGGTGACCACTTTTATTGGGCTTAAAGCGTTCGTAGCTTGATTTTTAAGTCTCTTGGGAAATCTCACGGCTTAACTGTGAGGCGTCTAAGAGATACTGGGAATCTAGGGACCGGGAGAGGTAAGAGGTACTTCAGGGGTAGAAGTGAAATTCTGTAATCCTTGAGGGACCACCGATGGCGAAGGCATCTTACCAGAACGGCTTCGACAGTGAGGAACGAAAGCTGGGGGAGCGAACGGGATTAGATACCCCGGTAGTCCCAGCCGTAAACTATGCGCGTTAGGTGTGCCTGTAACTACGAGTTACCGGGGTGCCGAAGTGAAAACGTGAAACGTGCCGCCTGGGAAGTACGGTCGCAAGGCTGAAACTTAAAGGAATTGGCGGGGGAGCACCACAACGGGTGGAGCCTGCGGTTTAATTGGACTCAACGCCGGGCAGCTCACCGGATAGGACAGCGGAATGATAGCCGGGCTGAAGACCTTGCTTGACCAGCTGAGA
>EF503699
AAGAATGGGGATAGCATGCGAGTCACGCCGCAATGTGTGGCATACGGCTCAGTAACACGTAGTCAACATGCCCAGAGGACGTGGACACCTCGGGAAACTGAGGATAAACCGCGATAGGCCACTACTTCTGGAATGAGCCATGACCCAAATCTATATGGCCTTTGGATTGGACTGCGGCCGATCAGGCTGTTGGTGAGGTAATGGCCCACCAAACCTGTAACCGGTACGGGCTTTGAGAGAAGGAGCCCGGAGATGGGCACTGAGACAAGGGCCCAGGCCCTATGGGGCGCAGCAGGCACGAAACCTCTGCAATAGGCGAAAGCTTGACAGGGTTACTCTGAGTGATGCCCGCTAAGGGTATCTTTTGGCACCTCTAAAAATGGTGCAGAATAAGGGGTGGGCAAGTCTGGTGTCAGCCGCCGCGGTAATACCAGCACCCCGAGTTGTCGGGACGATTATTGGGCCTAAAGCATCCGTAGCCTGTTCTGCAAGTCCTCCGTTAAATCCACCCGCTTAACGGATGGGCTGCGGAGGATACTGCAGAGCTAGGAGGCGGGAGAGGCAAACGGTACTCAGTGGGTAGGGGTAAAATCCTTTGATCTACTGAAGACCACCAGTGGTGAAGGCGGTTCGCCAGAACGCGCTCGAACGGTGAGGATGAAAGCTGGGGGAGCAAACCGGAATAGATACCCGAGTAATCCCAACTGTAAACGATGGCAACTCGGGGATGGGTTGGCCTCCAACCAACCCCATGGCCGCAGGGAAGCCGTTTAGCTCTCCCGCCTGGGGAATACGGTCCGCAGAATTGAACCTTAAAGGAATTTGGCGGGGAACCCCCACAAGGGGGAAAACCGTGCGGTTCAATTGGAATCCACCCCCCGGAAACTTTACCCGGGCGCG
>DQ260310
GATACCCCCGGAAACTGGGGATTATACCGGATATGTGGGGCTGCCTGGAATGGTACCTCATTGAAATGCTCCCGCGCCTAAAGATGGATCTGCCGCAGAATAAGTAGTTTGCGGGGTAAATGGCCACCCAGCCAGTAATCCGTACCGGTTGTGAAAACCAGAACCCCGAGATGGAAACTGAAACAAAGGTTCAAGGCCTACCGGGCACAACAAGCGCCAAAACTCCGCCATGCGAGCCATCGCGACGGGGGAAAACCAAGTACCACTCCTAACGGGGTGGTTTTTCCGAAGTGGAAAAAGCCTCCAGGAATAAGAACCTGGGCCAGAACCGTGGCCAGCCGCCGCCGTTACACCCGCCAGCTCGAGTTGTTGGCCGGTTTTATTGGGGCCTAAAGCCGGTCCGTAGCCCGTTTTGATAAGGTCTCTCTGGTGAAATTCTACAGCTTAACCTGTGGGAATTGCTGGAGGATACTATTCAAGCTTGAAGCCGGGAGAAGCCTGGAAGTACTCCCGGGGGTAAGGGGTGAAATTCTATTATCCCCGGAAGACCAACTGGTGCCGAAGCGGTCCAGCCTGGAACCGAACTTGACCGTGAGTTACGAAAAGCCAAGGGGCGCGGACCGGAATAAAATAACCAGGGTAGTCCTGGCCGTAAACGATGTGAACTTGGTGGTGGGAATGGCTTCGAACTGCCCAATTGCCGAAAGGAAGCTGTAAATTCACCCGCCTTGGAAGTACGGTCGCAAGACTGGAACCTAAAAGGAATTGGCGGGGGGACACCACAACGCGTGGAGCCTGGCGGTTTTATTGGGATTCCACGCAGACATCTCACTCAGGGGCGACAGCAGAAATGATGGGCAGGTTGATGACCTTGCTTGACAAGCTGAAAAGGAGGTGCAT
>EF503697
TAAAATGACTAGCCTGCGAGTCACGCCGTAAGGCGTGGCATACAGGCTCAGTAACACGTAGTCAACATGCCCAAAGGACGTGGATAACCTCGGGAAACTGAGGATAAACCGCGATAGGCCAAGGTTTCTGGAATGAGCTATGGCCGAAATCTATATGGCCTTTGGATTGGACTGCGGCCGATCAGGCTGTTGGTGAGGTAATGGCCCACCAAACCTGTAACCGGTACGGGCTTTGAGAGAAGTAGCCCGGAGATGGGCACTGAGACAAGGGCCCAGGCCCTATGGGGCGCAGCAGGCGCGAAACCTCTGCAATAGGCGAAAGCCTGACAGGGTTACTCTGAGTGATGCCCGCTAAGGGTATCTTTTGGCACCTCTAAAAATGGTGCAGAATAAGGGGTGGGCAAGTCTGGTGTCAGCCGCCGCGGTAATACCAGCACCCCGAGTTGTCGGGACGATTATTGGGCCTAAAGCATCCGTAGCCTGTTCTGCAAGTCCTCCGTTAAATCCACCTGCTCAACGGATGGGCTGCGGAGGATACCGCAGAGCTAGGAGGCGGGAGAGGCAAACGGTACTCAGTGGGTAGGGGTAAAATCCATTGATCTACTGAAGACCACCAGTGGCGAAGGCGGTTTGCCAGAACGCGCTCGACGGTGAGGGATGAAAGCTGGGGGAGCAAACCGGATTAGATACCCGGGGTAGTCCCAGCTGTAAACGGATGCAGACTCGGGTGATGGGGTTGGCTTCCGGCCCAACCCCAATTGCCCCCAGGCGAAGCCCGTTAAGATCTTGCCGCCCTGTCAGATGTCAGGGCCGCCAATACTCGAAACCTTAAAAGGAAATTGGGCGCGGGAAAAGTCACCAAAAGGGGGTTGAAACCCTGCGGGTTATATATTGTAAACC
"""
id_to_tax = """
AY800210 Archaea;Euryarchaeota;Halobacteriales;uncultured
EU883771 Archaea;Euryarchaeota;Methanomicrobiales;Methanomicrobium et rel.
EF503699 Archaea;Crenarchaeota;uncultured;uncultured
DQ260310 Archaea;Euryarchaeota;Methanobacteriales;Methanobacterium
EF503697 Archaea;Crenarchaeota;uncultured;uncultured
"""
in_seqs_aligned = """
>11472384 1..1530
---------------------------------------------------------------------------------------------------------------AGAGTTTGAT-CC-T-G-GCTC-AG-AT-TGAA-C-GC--TGG-C--G-GC-A-TG--C----C-T--TACACA-T-GC-A-AGT-CGA-A-CG----------G-CAG-CA-C------------------------------GG-G-GG----------------------------------------------------CAA------------------------------------------------------------------------------------C-C-CT------------------G-GT--G--GC--G--AG-T-GG-C-GA-A--C-------------GGG-TGAGT-A--AT-AC-A-T-C-GG---A-A--C-GT-G--T-C-CTG--TA-G------------------------------------------------------------------T-GG----GGG-AT-AG-CCC-------------------------G-G-C-----------------------GAA-A---GCC-GGA-TTAA-TA---CC-G--C-AT-A----------C--------------------G-------------------------------------CT-C-----------------------------------------------------------------------------------------------------------------------T-AC-G--------------------------------------------------------------------------------------------------------------------------------------G-A-G---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------GAAA--G-G-G-GG-----G--GA-T--C--------------------------------------------------------------------------------------------------------------------TTA-G----------------------------------------------------------------------------------------------------------------------G-A--CC-TC--C---C-G--------------C----T-A---C-AG-G---GG---C-G-----G-CCG-ATG--G-CAG--A------TT--A--G-CT-A----G---TTGG-T-G-GG-G-T----AAA-GG-C-C-T-ACCA--A-GG-C-G--A-CG-A------------TCT-G-T------AG-CT-G-G-TCT-G-AG----A--GG-AC--G-AC-C-AG-CCAC-A-CTGGG--A-C-TG-A-GA-C-AC-G-G-CCCAGA-CTCC-TAC-G--G-G-A-G-GC-A-GC-A-G-TG---GG-G-A-ATT-TTGGA-C-AA-T-GG--GG-GC-A----A-C-CC-T-GA-TC-CA-GCAA-TGCC-G-CG-T---G-T-G--T--GA-A-G--A--A-G-G-CC-----TT-CG---------G-G-T-T-G-T--A---AA-G-CAC--------TT-TT-G-T--C-CGG----AA-A--G---AA-AACG---CCGT-GG----T--T--AA-T---A----C-----CC-G-TGG-CGG-AT-GA-CG-GT-A-C-CG-G-AA-G---------AA-----------TAAGC-ACC-GG-C-TAA---C--T-ACGT--GCCA--G-C---A--GCCG---C-GG--TA-AT--AC---GT-AG-GGT-GCA-A-G-CG-TTAA-T-CGG-AA-TT-A--C-T--GGGC-GTA----AA-GCGT-GC--G-CA-G-G-C-G------------G--T-CC-G-C-T-AA----G-A-C-A---G-ATG-TG-A-AA-TC--CC-CGG-G--------------------------------------------------------------------CT-T-AA-------------------------------------------------------------------------CC-T-G-GG-AA-C----T-G-C-A-T-T--------T--GT-G-A-C-T-G-GCG--G-G-C-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------------------------------------------------------------------------------T-A-G-A-G-T-A-----T-GG--CA-G-A------------G-GG-G-GG-T----AG--AATT-CCA-C-GT--GT-A-GCA-GTGAAA-TG-CGT-AGAG-A-TG-T-GGA--GG-A-AT-A-CC-GA--T--G--GC-GAA-G--G-C---A----G--C-C-CCCTG------G-GC-CA--------------------------------------------------------------AT-A-C-T--GA--CG-----CT-CA-TG--C-A-CGA--AA-G-C--------------G-TGGG-GAG-C-A-AACA--GG-ATTA-G-ATA-C-----CC-T-G-GTA-G-T----C-CA--C-G-CCC-T-AAA--C-GATG-TC--AA-CT---------A-GT--T--G-T-CG-G-GT-C--T---------------------------------------------------------------------------------------TC-AT--------------------------------------------------------------------------------------------------------------------------------------------------T-G-A-CT--T-G-G-T-AA-C------GT--A----GC-TAA--CG-C-G-T--G--AA-GT--T----G-ACC-GCC-T-G-GG-GAG-TA---CGG-----T-C--G-C-A-A-GAT-T--AAA-ACTC-AAA---------GGAA-TTG-ACGGG-G-A-CCCG----C-A--C-A-A-GCG-GT-G--G--AT-GA-T--GT-GGA-TT-AATT-C-G-ATG-CAAC-G-CG-A-AA-A-A-CC-TT-A-CC-TACCC-TT-G-AC-A-T-G--------------TAT-G-G-------------A-AT-C-C-T--GC--T-GA-G-A-G--G-T-G--G-G-A-G--T-GC----CC-------------------------------------G--AA-A------------------------------------------GG---GA----G---CC-ATA---A--CA---------------------------------------------------C-A-G-G-T-GCTG-CA-TGG-CT--GTC-GTC-A-GC-TC---G-TG-TC-G--TGA-GA-TGT-T-GG-G-TT-AA-GT-CCCGC-AA--------C-GAG-CGC-A-ACC-C-T-TG--TC--C-CTAG--T-T-G-C-T---A--C---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------GCAA-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------G------------A----G---C-A--CT---------------C-T-A-G-G-GA-G--AC-T-G-CCG--G-T------------------------------------G-A---CAA----------------------------------A-C-C-G--G-A-GG-A--AGG-T--GGGG-A-TGAC-GTC--AAGT-C---CTC-A-T-G-G-C-C-CTT----AT-G--GG-T-A-GG-GC-TT-CAC-ACGTC-A--TA--CAATG---G-TCGG-A-A--C-AGA-GG-GT--------------------------------------------------------------------------------------------------C-G-C-C-A--A-CCCG-C--G---------------------------------------A-GG-G-G-----------G--A-G-CC---A----------A--TCC-C------A-G-AAAAC-CG-A-T-C-G-TAG-TCC--------GGA-T-CGCAC-TC--T-GCAA-CT-C-------------------------------------------------------------------------------------------------G-AGTGC-G-T-G-AA-G-CT-GGAAT-CG-C-TA--G-TA-AT-C-G-C----GGA-TC-A-G-C-------AT--GCC-GC-G-GT-G-AAT-ACGT-T-CCCGGGTCT-TGTA----CACACCG-CCC-GTC-----A---CA--CCA-TG-GG-A--G---TGG-G-TT-TT-ACC--A-GAA------G--T-GGC-TA-G-T-C-T-AA-C-C-------------------------------------------------------------G-CA-A------------------------------------------------------------------------------------------------------GG-A--GG-A--C---GG-TCA--CC--ACG-G----T-AGG-AT-TCA------------------------TG--ACT-GGGG-TGAAGTCG--TAACAA-GGTAG-CCGT-ATCGGAA-GGTG-CGGC-TGGATCACCTCCTTTCTCGAG--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>11468680 6..1535
------------------------------------------------------------------------------------------------------------TGAAGAGTTTGAT-CC-T-G-GCTC-AG-AT-TGAA-C-GC--TGG-C--G-GC-A-TG--C-----CT--TACACA-T-GC-A-AGT-CGA-A-CG----------G-CAG-CA-C------------------------------GG-G-TG------------------------------------------------------C-T----------------------------------------------------------------------------------TGCACCT-----------------G-GT--G--GC--G--AG-T-GG-C-GA-A--C-------------GGG-TGAGT-A--AT-AC-A-T-C-GG---A-A--C-AT-G--T-C-CTG--TA-G------------------------------------------------------------------T-GG----GGG-AT-AG-CCC-------------------------G-G-C-----------------------GAA-A---GC--CGGATTAA-TA---CC-G--C-AT-A----------C--------------------G-------------------------------------AT-C-----------------------------------------------------------------------------------------------------------------------T-AC-G--------------------------------------------------------------------------------------------------------------------------------------G-A-T---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------GAAA--G-C-G-GG-----G--GA-C--C--------------------------------------------------------------------------------------------------------------------TTC-G----------------------------------------------------------------------------------------------------------------------G-G--CC-TC--G---C-G--------------C----T-A---T-AG-G---GT---T-G-----G-CCG-ATG--G-CTG--A------TT--A--G-CT-A----G---TTGG-T-G-GG-G-T----AAA-GG-C-C-T-ACCA--A-GG-C-G--A-CG-A------------TCA-G-T------AG-CT-G-G-TCT-G-AG----A--GG-AC--G-AC-C-AG-CCAC-A-CTGGG--A-C-TG-A-GA-C-AC-G-G-CCCAGA-CTCC-TAC-G--G-G-A-G-GC-A-GC-A-G-TG---GG-G-A-ATT-TTGGA-C-AA-T-GG--GC-GA-A----A-G-CC-T-GA-TC-CA-GCAA-TGCC-G-CG-T---G-T-G--T--GA-A-G--A--A-G-G-C-----CTT-CG---------G-G-T-T-G-T--A---AA-G-CAC--------TT-TT-G-T--C-CGG----AA-A--G---AA-ATCC---TTGG-CT----C--T--AA-T---A----C------A-G-TCG-GGGGAT-GA-CG-GT-A-C-CG-G-AA-G---------AA-----------TAAGC-ACC-GG-C-TAA---C--T-ACGT--GCCA--G-C---A--GCCG---C-GG--TA-AT--AC---GT-AG-GGT-GCG-A-G-CG-TTAA-T-CGG-AA-TT-A--C-T--GGGC-GTA----AA-GCGT-GC--G-CA-G-G-C-G------------G--T-TT-G-C-T-AA----G-A-C-C---G-ATG-TG-A-AA-TC--CC-CGG-G--------------------------------------------------------------------CT-C-AA-------------------------------------------------------------------------CC-T-G-GG-AA-C----T-G-C-A-T-T--------G--GT-G-A-C-T-G-GCA--G-G-C-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------------------------------------------------------------------------------T-A-G-A-G-T-A-----T-GG--CA-G-A------------G-GG-G-GG-T----AG--AATT-CCA-C-GT--GT-A-GCA-GTGAAA-TG-CGT-AGAG-A-TG-T-GGA--GG-A-AT-A-CC-GA--T--G--GC-GAA-G--G-C---A----G--C-C-CCCTG------G-GC-CA--------------------------------------------------------------AT-A-C-T--GA--CG-----CT-CA-TG--C-A-CGA--AA-G-C--------------G-TGGG-GAG-C-A-AACA--GG-ATTA-G-ATA-C-----CC-T-G-GTA-G-T----C-CA--C-G-CCC-T-AAA--C-GATG-TC--AA-CT---------A-GT--T--G-T-TG-G-GG-A--T---------------------------------------------------------------------------------------TC-AT--------------------------------------------------------------------------------------------------------------------------------------------------T-T-C-CT--T-A-G-T-AA-C------GT--A----GC-TAA--CG-C-G-T--G--AA-GT--T----G-ACC-GCC-T-G-GG-GAG-TA---CGG-----T-C--G-C-A-A-GAT-T--AAA-ACTC-AAA---------GGAA-TTG-ACGGG-G-A-CCCG----C-A--C-A-A-GCG-GT-G--G--AT-GA-T--GT-GGA-TT-AATT-C-G-ATG-CAAC-G-CG-A-AA-A-A-CC-TT-A-CC-TACCC-TT-G-AC-A-T-G--------------GTC-G-G-------------A-AT-C-C-C--GC--T-GA-G-A-G--G-T-G--G-G-A-G--T-GC----TC-------------------------------------G--AA-A------------------------------------------GA---GA----A---CC-GGC---G--CA---------------------------------------------------C-A-G-G-T-GCTG-CA-TGG-CT--GTC-GTC-A-GC-TC---G-TG-TC-G--TGA-GA-TGT-T-GG-G-TT-AA-GT-CCCGC-AA--------C-GAG-CGC-A-ACC-C-T-TG--TC--C-TTAG--T-T-G-C-T---A--C---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------GCAA-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------G------------A----G---C-A--CT---------------C-T-A-A-G-GA-G--AC-T-G-CCG--G-T------------------------------------G-A---CAA----------------------------------A-C-C-G--G-A-GG-A--AGG-T--GGGG-A-TGAC-GTC--AAGT-C---CTC-A-T-G-G-C-C-CTT----AT-G--GG-T-A-GG-GC-TT-CAC-ACGTC-A--TA--CAATG---G-TCGG-A-A--C-AGA-GG-GT--------------------------------------------------------------------------------------------------T-G-C-C-A--A-CCCG-C--G---------------------------------------A-GG-G-G-----------G--A-G-CT---A----------A--TCC-C------A-G-AAAAC-CG-A-T-C-G-TAG-TCC--------GGA-T-TGCAC-TC--T-GCAA-CT-C-------------------------------------------------------------------------------------------------G-AGTGC-A-T-G-AA-G-CT-GGAAT-CG-C-TA--G-TA-AT-C-G-C----GGA-TC-A-G-C-------AT--GCC-GC-G-GT-G-AAT-ACGT-T-CCCGGGTCT-TGTA----CACACCG-CCC-GTC-----A---CA--CCA-TG-GG-A--G---TGG-G-TT-TT-ACC--A-GAA------G--T-GGC-TA-G-T-C-T-AA-C-C-------------------------------------------------------------G-CA-A------------------------------------------------------------------------------------------------------GG-A--GG-A--C---GG-TCA--CC--ACG-G----T-AGG-AT-TCA------------------------TG--ACT-GGGGTGAAGTCGTAACAAGGTAGC----CGTA--TCGGAA-GGTG-CGGC-TGGATCACCTCCTTT-C------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>11458037 1..1544
-----------------------------------------------------------------------------------------------------------------------------------------GA-CGAA-C-GC--TGG-C--G-GC-G-TG--C----C-T--AACACA-T-GC-A-AGT-CGA-A-CGG----------------T--TTC-------------------------------------------------------------------------------GAAGAT-CGG-A-CTTCGAA-----------------------------------------------------------------------------------------TTTCGA--A-TTTCGATCATCGAGATAGT-GG-C-GG-A--C-------------GGG-TGAGT-A--AC-GC-G-T-G-GG---TAA--C-CT-A--C-C-CAT--AA-A------------------------------------------------------------------G-CC----GGG-AC-AA-CCC-------------------------T-T-G-----------------------GAA-A---CGA-GGG-CTAA-TA---CC-G--G-AT-A----------A--------------------G-C--T-T-G--A--G-----------------AA---GT-G-----------------------------------------------------------------------------------------------------------------------G-CA-T--------------------------------------------------------------------------------------------------------------------------------------C-A-C--T-T---------------T--T-T-A-A-G-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------GAAA--G-G-T-GG-----C--C--GA-T--------------------------------------------------------------------------------------------------------------------GAG-A----------------------------------------------------------------------------------------------------------------------A-T--GC-TG--C---C-G--------------A----T-T---A-TG-G---AT---G-G-----A-CCC-GCG--T-CTG--A------TT--A--G-CT-G----G---TTGG-T-G-GG-G-T----AAA-GG-C-C-T-ACCA--A-GG-C-G--A-CG-A------------TCA-G-T------AG-CC-G-G-CCT-G-AG----A--GG-GT--G-AA-C-GG-CCAC-A-CTGGG--A-C-TG-A-GA-C-AC-G-G-CCCAGA-CTCC-TAC-G--G-G-A-G-GC-A-GC-A-G-TG---GG-G-A-ATC-TTCCG-C-AA-T-GG--AC-GA-A----A-G-TC-T-GA-CG-GA-GCAA-CGCC-G-CG-T---G-T-A--T--GA-T-G--A--A-G-G-TT-----TT-CG---------G-A-T-T-G-T--A---AA-G-TAC--------TG-TC-T-A--T-GGG----GA-A--G---AATGGTG---TGCT-TG----A--G--AA-T---A----T-----TA-A-GTA-CAA-AT-GA-CG-GT-A-C-CC-A-AG-G---------AG-----------GAAGC-CCC-GG-C-TAA---C--T-ACGT--GCCA--G-C---A--GCCG---C-GG--TA-AT--AC---GT-AG-GGG-GCA-A-G-CG-TTGT-C-CGG-AA-TT-A--T-T--GGGC-GTA----AA-GGGC-GC--G-TA-G-G-C-G------------G--A-TA-G-T-T-AA----G-T-C-C---G-GTG-TG-A-AA-GA--TC-AGG-G--------------------------------------------------------------------CT-C-AA-------------------------------------------------------------------------CC-C-T-GA-GA-G----T-G-C-A-T-C--------G--GA-A-A-C-T-G-GGT--A-T-C-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------------------------------------------------------------------------------T-T-G-A-G-G-A-----C-AG--GA-G-A------------G-GA-A-AG-T----GG--AATT-CCA-C-GT--GT-A-GCG-GTGAAA-TG-CGT-AGAT-A-TG-T-GGA--GG-A-AC-A-CC-AG--T--G--GC-GAA-G--G-C---G----A--C-T-TTCTG------G-AC-TG--------------------------------------------------------------TA-A-C-T--GA--CG-----CT-GA-GG--C-G-CGA--AA-G-C--------------G-T-GGGGAG-C-A-AACA--GG-ATTA-G-ATA-C-----CC-T-G-GTA-G-T----C-CA--C-G-CTG-T-AAA--C-GATG-AG--TG-CT---------A-GG--T--G-T-AG-A-GG-G----------------------------------------------------------------------------------------TATC-GA-CC-------------------------------------------------------------------------------------------------------------------------------------------------C-C-TT--C-T-G-T-GC-C------GC--A----GT-TAA--CA-C-A-A--T--AA-GC--A----C-TCC-GCC-T-G-GG-GAG-TA---CGG-----C-C--G-C-A-A-GGT-T--GAA-ACTC-AAA---------GGAA-TTG-ACGGG-G-G-CCCG----C-A--C-A-A-GCG-GT-G--G--AG-CA-T--GT-GGT-TT-AATT-C-G-ACG-CAAC-G-CG-A-AG-A-A-CC-TT-A-CC-AGGGC-TT-G-AC-A-T-C--------------CTC-T-G-------------A-AC-T-T-G--CT--G-GA-A-A-C--A-G-G--A-A-G-G--T-GC----CC-------------------------------------T--TC-G------------------------------------------GG---GA----G---CA-GAG---A--GA---------------------------------------------------C-A-G-G-T-GGTG-CA-TGG-TT--GTC-GTC-A-GC-TC---G-TG-TC-G--TGA-GA-TGT-T-GG-G-TT-AA-AT-CCCGC-AA--------C-GAG-CGC-A-ACC-C-C-TG--TA--T-TTAG--T-T-G-C-T---AA-C-G--C--G-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------TAGA-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------G----G----C-G------------A----G---C-A--CT---------------C-T-G-G-A-TA-G--AC-T-G-CCG--G-T------------------------------------G-A---TAA----------------------------------A-C-C-G--G-A-GG-A--AGG-T--GGGG-A-TGAC-GTC--AAAT-C---ATC-A-T-G-C-C-C-CTT----AT-G--TT-C-T-GG-GC-TA-CAC-ACGTG-C--TA--CAATG---G-CCGG-T-A--C-AGA-CG-GA--------------------------------------------------------------------------------------------------A-G-C-G-A--A-GCCG-C--G---------------------------------------A-GG-C-G-----------G--A-G-CA---A----------A--TCC-G------A-G-AAAGC-CG-G-T-C-T-CAG-TTC--------GGA-T-TGCAG-GC--T-GCAA-CT-C-------------------------------------------------------------------------------------------------G-CCTGC-A-T-G-AA-G-TC-GGAAT-CG-C-TA--G-TA-AT-C-G-C----AGG-TC-A-G-C-------AT--ACT-GC-G-GT-G-AAT-ACGT-T-CCCGGGCCT-TGTA----CACACCG-CCC-GTC-----A---CA--CCA-CG-AA-A--G---TCT-G-CA-AC-ACC--C-GAA------G--C-CGG-TG-A-G-G-T-AA-C-C-G-----------------------------------------------------------A-CT-C-----------------------------------------------------------------------------------------------------GAG-A--TT-C--G---AG-GCT--CG--AAG-T----T--------CGA------------------------GG--ATC-GAAG-TG-TAA-GCGAAATTA-ATAAG-TCTT-A--GTAA-AGCT-AAAA-AGCATTAAGACCGA---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>11469739 1..1595
------------------------------------------------------------------------------------------------------------AGAGTTT-GA--T-CC-T-G-GCTC-AG-GA-TGAA-C-GC--TGG-C--G-GC-G-TG--C----C-T--AACACA-T-GC-A-AGT-CGA-A-CGA---------G-A---AG-C--------TAACTT--------------CTGA-T-TCC---------------------------------------------------TTC-G---------------------------------------------------------------------------------GGAT-GATGAGGTTA------------GC--A--GA--A--AG-T-GG-C-GA-A--C-------------GGG-TGAGT-A--AC-GC-G-T-G-GG---TAA--T-CT-A--C-C-CTG--TAAG------------------------------------------------------------------T-GG----GGG-AT-AA-CCC-------------------------T-C-C-----------------------GAA-A---GGA-GGG-CTAA-TA---CC-G--C-AT-A----------A--------------------T-A--T-C-T--T--T-----------------AT---CC-C-----------------------------------------------------------------------------------------------------------------------A-AA-A--------------------------------------------------------------------------------------------------------------------------------------G-A-G--G-T---------------A--A-A-G-A-T-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------TAAA--G-A-T-GG-----C--CT-C--TA-------------------------------------------------------------------------------------------------------------------TAC-----------------------------------------------------------------------------------------------------------------------TA-T--GC-TA--T---C-G--------------C----T-T---C-AG-G---AT---G-A-----G-TCC-GCG--T-CCT--A------TT--A--G-TT-A----G---TTGG-T-G-GG-G-T----AAT-GG-C-C-T-ACCA--A-GA-C-G--A-CA-A------------TGG-G-T------AG-CC-G-G-TCT-G-AG----A--GG-AT--G-TA-C-GG-CCAC-A-CT-GG-GA-C-TG-A-GA-T-AC-G-G-CCCAGA-CTCC-TAC-G--G-G-A-G-AC-A-GC-A-G-TG---GG-G-A-ATA-TTGCG-C-AA-T-GG--GG-GA-A----A-C-CC-T-GA-CG-CA-GCGA-CGCC-G-CG-T---G-G-A--T--GA-T-G--A--A-G-G-CC-----CT-TG---------G-G-T-T-G-T--A---AA-A-TCC--------TG-TT-C-T--G-GGG----GA-A--G---AA-AGCT---TAAA-GG-T--C--C--AA-T---A----A---A-CC-C-TTA-AGC-CT-GA-CG-GT-A-C-CC-C-AA-G---------AG-----------AAAGC-TCC-GG-C-TAA---T--T-ATGT--GCCA--G-C---A--GCCG---C-GG--TA-AT--AC---AT-AA-GGA-GCA-A-G-CG-TTAT-C-CGG-AA-TT-A--T-T--GGGC-GTA----AA-GAGC-TC--G-TA-G-G-C-G------------G--T-CT-T-A-A-AA----G-T-C-A---G-TTG-TG-A-AA-TT--AT-CAG-G--------------------------------------------------------------------CT-C-AA-------------------------------------------------------------------------CC-T-G-AT-AA-G----G-T-C-A-T-C--------T--GA-A-A-C-T-C-TAA--G-A-C-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------------------------------------------------------------------------------T-T-G-A-G-G-T-----T-AG--AA-G-A------------G-GA-A-AG-T----GG--AATT-CCC-G-GT--GT-A-GCG-GTGAAA-TG-CGT-AGAT-A-TC-G-GGA--GG-A-AC-A-CC-AG--T--G--GC-GAA-G--G-C---G----G--C-T-TTCTG------G-TC-TA--------------------------------------------------------------TC-T-C-T--GA--CG-----CT-GA-GG--A-G-CGA--AA-G-C--------------T-AGGG-GAG-C-A-AACG--GG-ATTA-G-ATA-C-----CC-C-G-GTA-G-T----C-CT--A-G-CTG-T-AAA--C-GATG-GA--TA-CT---------A-GG--T--G-T-GG-G-AG-G----------------------------------------------------------------------------------------TATC-GA-CC-------------------------------------------------------------------------------------------------------------------------------------------------C-C-TT--C-T-G-T-GC-C------GT--A----GC-TAA--CG-C-A-T--T--AA-GT--A----T-CCC-GCC-T-G-GG-GAG-TA---CGG-----T-C--G-C-A-A-GGC-T--GAA-ACTC-AAA---------GGAA-TTG-ACGGG-G-G-CCCG----C-A--C-A-A-GCG-GT-G--G--AG-CA-T--GT-GGT-TT-AATT-C-G-ACG-CAAC-G-CG-A-AG-A-A-CC-TT-A-CC-GGGAC-TT-G-AC-A-T-T------------A-TCT-T-G-------------C-CC-G-T-C--TA--A-GA-A-A-T--T-A-G--A-T-C-T--T-CT----TCC------------------------------------T--T--T-----------------------------------------GGA---AG----A---CA-GGA---T--AA---------------------------------------------------C-A-G-G-T-GGTG-CA-TGG-TT--GTC-GTC-A-GC-TC---G-TG-TC-G--TGA-GA-TGT-T-GG-G-TT-AA-GT-CCCAC-AA--------C-GAG-CGC-A-ACC-C-T-TG--TG--C-TTAG--T-T-G-C-T---AA-C-T--T--GT------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------TTT--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------A---C----A----A-G------------T----G---C-A--CT---------------C-T-A-G-G-CA-G--AC-T-G-CCG--C-A------------------------------------G-A---TAA----------------------------------T-G-C-G--G-A-GG-A--AGG-T--GGGG-A-TGAC-GTC--AAAT-C---ATC-A-T-G-C-C-C-CTT----AC-G--TC-C-C-GG-GC-TA-CAC-ACGTG-C--TA--CAATG---G-CCTG-T-A--C-AGA-GG-GT--------------------------------------------------------------------------------------------------A-G-C-G-A--A-AGAG-C--G---------------------------------------A-TC-T-T-----------A--A-G-CC---A----------A--TCC-C------A-A-AAAGC-AG-G-C-C-C-CAG-TTC--------GGA-T-TGGAG-GC--T-GCAA-CT-C-------------------------------------------------------------------------------------------------G-CCTCC-A-T-G-AA-G-TA-GGAAT-CG-C-TA--G-TA-AT-C-G-C----GGA-TC-A-G-C-------AT--GCC-GC-G-GT-G-AAT-ACGT-T-CCCGGGCCT-TGTA----CACACCG-CCC-GTC-----A---CA--CCA-CG-AA-A--G---TTG-G-CG-AT-ACC--T-GAA------G--T-TAC-TA-G-G-C-T-AA-C-C-TG-----------------------------------------------------GCACTCAACTAA-------------------------------------------------------------------------------------------------GT--TC-A--CT-A--A--CTTATTTGCTT--AAA-A----T-AAG-GCTTAATG----------------------TG--CTT-AGTT-GA-GTG-CCGGGAGGC-AGGTA-CCGA-AGGTATG-GCTGGCGATTGGGGTGAAGTCGTA---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>11460543 1..1587
-------------------------------------------------------------------------------------------------------------TGGTTT-GA--T-CC-T-G-GCTC-AG-GA-CAAA-C-GC--TGG-C--G-GC-G-TG--C----C-T--AACACA-T-GC-A-AGT-CGA-A-CGA---------G-A---AG-C--------CAGCTT--------------TTGA-T-TCC---------------------------------------------------TTC-G---------------------------------------------------------------------------------GGAT-GAGAAAGCAG------------GT--A--GA--A--AG-T-GG-C-GA-A--C-------------GGG-TGAGT-A--AC-GC-G-T-G-GG---TAA--T-CT-A--C-C-CTG--TAAG------------------------------------------------------------------T-AG----GGG-AT-AA-CCC-------------------------T-C-T-----------------------GAA-A---AGA-GGG-CTAA-TA---CC-G--C-AT-A----------A--------------------T-A--T-C-T--T--T-----------------AC---CC-C-----------------------------------------------------------------------------------------------------------------------A-TA-A--------------------------------------------------------------------------------------------------------------------------------------G-A-A--G-T---------------A--A-A-G-A-T-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------TAAA--G-A-T-GG-----C--CT-C--TG-------------------------------------------------------------------------------------------------------------------TA------------------------------------------------------------------------------------------------------------------------TA-T--GC-TA--T---C-G--------------C----T-T---C-AG-G---AT---G-A-----G-CCC-GCG--T-CCT--A------TT--A--G-TT-A----G---TTGG-T-A-AG-G-T----AAT-GG-C-T-T-ACCA--A-GA-C-C--A-CG-A------------TGG-G-T------AG-CC-G-G-TCT-G-AG----A--GG-AT--G-TA-C-GG-CCAC-A-CT-GG-GA-C-TG-A-GA-T-AC-G-G-CCCAGA-CTCC-TAC-G--G-G-A-G-GC-A-GC-A-G-TG---GG-G-A-ATA-TTGCG-C-AA-T-GG--GG-GA-A----A-C-CC-T-GA-CG-CA-GCGA-CGCC-G-CG-T---G-G-A--T--GA-T-G--A--A-G-G-CC-----TT-CG---------G-G-T-T-G-T--A---AA-A-TCC--------TG-TT-T-T--G-GGG----GA-C--G---AA-ACCT---TAAG-GG-T--C--C--AA-T---A----A---A-CC-C-TTA-A-A-TT-GA-CG-GT-A-C-CC-C-AA-G---------AG-----------AAAGC-TCC-GG-C-TAA---T--T-ATGT--GCCA--G-C---A--GCCG---C-GG--TA-AT--AC---AT-AA-GGA-GCA-A-G-CG-TTGT-C-CGG-AA-TT-A--T-T--GGGC-GTA----AA-GAGT-TC--G-TA-G-G-C-G------------G--T-CT-T-A-A-AA----G-T-C-A---G-GTG-TG-A-AA-TT--AT-CAG-G--------------------------------------------------------------------CT-T-AA-------------------------------------------------------------------------CC-T-G-AT-AC-G----G-T-C-A-T-C--------T--GA-A-A-C-T-T-TAA--G-A-C-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------------------------------------------------------------------------------T-T-G-A-G-G-T-----T-AG--GA-G-A------------G-GA-A-AG-T----GG--AATT-CCC-G-GT--GT-A-GCG-GTGAAA-TG-CGT-AGAT-A-TC-G-GGA--GG-A-AC-A-CC-AG--T--G--GC-GAA-G--G-C---G----G--C-T-TTCTG------G-CC-TA--------------------------------------------------------------AC-T-C-T--GA--CG-----CT-GA-GG--A-A-CGA--AA-G-C--------------T-AGGG-GAG-C-A-AACG--GG-ATTA-G-ATA-C-----CC-C-G-GTA-G-T----C-CT--A-G-CTG-T-AAA--C-GATG-GA--TA-CT---------A-GG--T--G-T-GG-G-AG-G----------------------------------------------------------------------------------------TATC-GA-CC-------------------------------------------------------------------------------------------------------------------------------------------------C-C-TT--C-T-G-T-GC-C------GW--C----AC-TAA--CG-C-A-T--T--AA-GT--A----T-CCC-GCC-T-G-GG-GAG-TA---CGG-----T-C--G-C-A-A-GGC-T--AAA-ACTC-AAA---------GGAA-TTG-ACGGG-G-G-CCCG----C-A--C-A-A-GCG-GT-G--G--AG-CA-T--GT-GGT-TT-AATT-C-G-ACG-CAAC-G-CG-A-AG-A-A-CC-TT-A-CC-GGGGC-TT-G-AC-A-T-T------------G-TCT-T-G-------------C-CC-G-T-T--TA--A-GA-A-A-T--T-A-A--A-T-T-T--T-CT----TCC---------------------------------CTTT--TA-G-----------------------------------------GGA---AG----A---CA-AGA---T--AA---------------------------------------------------C-A-G-G-T-GGTG-CA-TGG-TT--GTC-GTC-A-GC-TC---G-TG-TC-G--TGA-GA-TGT-T-GG-G-TT-AA-GT-CCCAC-AA--------C-GAG-CGC-A-ACC-C-T-TA--TT--C-TTAG--T-T-G-C-T---AG-T-T--T--G-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------TTT--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------A---C----A----A-A------------C----G---C-A--CT---------------C-T-A-A-A-GA-G--AC-T-G-CCG--C-A------------------------------------G-A---TAA----------------------------------T-G-C-G--G-A-GG-A--AGG-T--GGGG-A-TGAC-GTC--AAAT-C---ATC-A-T-G-C-C-C-CTT----AC-G--TC-C-C-GG-GC-TA-CAC-ACGTG-C--TA--CAATG---G-CCTG-T-A--C-AGA-GG-GT--------------------------------------------------------------------------------------------------A-G-C-G-A--A-AGAG-C--G---------------------------------------A-TC-T-C-----------A--A-G-CT---A----------A--TCC-C------T-T-AAAAC-AG-G-T-C-T-CAG-TTC--------GGA-T-TGGAG-GC--T-GCAA-CT-C-------------------------------------------------------------------------------------------------G-CCTCC-A-T-G-AA-G-TC-GGAAT-CG-C-TA--G-TA-AT-C-G-C----GGA-TC-A-G-C-------AT--GCC-GC-G-GT-G-AAT-ACGT-T-CCCGGGCCT-TGTA----CACACCG-CCC-GTC-----A---CA--CCA-TG-AA-A--G---TTG-G-CG-AT-ACC--T-GAA------G--T-TAC-TG-T-G-C-T-AA-C-C-CG----------------------------------------------------------G-CA-----------------------------------------------------------------------------------------------------C--TC-A--AC-T--A---AG-TAC--AT--TAA-G-TCTT-ATT-TT-AAG------------------------CT--ATT-GTAT-TTAGTTGAGTGCCGGGAGGCAGGTACCTAAGGTATGGCTAGCGATTGGGGTGAAGTCGTA---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>11480408 26..1556
------------------------------------------------------------------------------------------------------------TGAGTTT-GA--T-CC-T-G-GCTC-AG-GA-CGAA-C-GC--TGG-C--G-GC-G-TG--C----T-T--AACACA-T-GC-A-AGT-CGA-A-CGG---------G-G-ATATCCGA-------GC------------------GGA---AG--------------------------------------------------GTTTC-G----------------------------------------------------------------------------------GCCGGAAGGTTGGG-------------T--AT-TC--G--AG-T-GG-C-GG-A--C-------------GGG-TGAGT-A--AC-GC-G-T-G-AG---CAA--T-CT-G--T-C-CCG--GA-C------------------------------------------------------------------A-GG----GGG-AT-AA-CAC-------------------------T-T-G-----------------------GAA-A---CAG-GTG-CTAA-TA---CC-G--C-AT-A----------A--------------------G-A--C-C-A--C--A-----------------GC---AT-C-----------------------------------------------------------------------------------------------------------------------G-CA-T--------------------------------------------------------------------------------------------------------------------------------------G-G-T--G-C---------------A--G-G-G-G-T-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------AAAA--G-G----------------------------------------------------------------------------------------------------------------------------------------AGC-G----------------------------------------------------------------------------------------------------------------------------------A---TCCG-------------G----T-C---T-GG-G---GT---G-A-----G-CTC-GCG--T-CCG--A------TT--A--G-AT-A----G---TTGG-T-G-AG-G-T----AAC-GG-C-C-C-ACCA--AGT--C-A--A-CG-A------------TCG-G-T------AG-CC-G-A-CCT-G-AG----A--GG-GT--G-AT-C-GG-CCAC-A-TTGGA--A-C-TG-A-GA-G-AC-G-G-TCCAAA-CTCC-TAC-G--G-G-A-G-GC-A-GC-A-G-TG---GG-G-A-ATA-TTGGG-C-AA-T-GG--GC-GA-A----A-G-CC-T-GA-CC-CA-GCAA-CGCC-G-CG-T---G-A-G--T--GA-A-G--A--A-G-G-CC-----TT-CG---------G-G-T-T-G-T--A---AA-G-CTC--------TG-TT-A-T--G-CGA----GA-C--G---A-----------------------AGGAAG-----------------------------T-GA-CG-GT-A-T-CG-C-AT-A---------AG-----------GAAGC-CCC-GG-C-TAA---C--T-ACGT--GCCA--G-C---A--GCCG---C-GG--TA-AT--AC---GT-AG-GGG-GCG-A-G-CG-TTGT-C-CGG-AA-TG-A--C-T--GGGC-GTA----AA-GGGC-GT--G-TA-G-G-C-G------------G----CC-G-TTT-AA----G-T-A-T---G-GAG-TG-A-AA-GT--CC-ATT-T--------------------------------------------------------------------TT-C-AA-------------------------------------------------------------------------GG-A-T-GG-AA-T----T-G-C-T-T-T--------G--TA-G-A-C-T-GGATG--G---C-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------------------------------------------------------------------------------T-T-G-A-G-T-G-----C-GG--AA-G-A------------G-GT-A-AG-T----GG--AATT-CCC-A-GT--GT-A-GCG-GTGAAA-TG-CGT-AGAG-A-TT-G-GGA--GG-A-AC-A-CC-AG--T--G--GC-GAA-G--G-C---G----A--C-T-TACTG------G-GC-CG--------------------------------------------------------------TA-A-C-T--GA--CG-----CT-GA-GG--C-G-CGA--AA-G-C--------------G-TGGG-G-AGC-G-AACA--GG-ATTA-G-ATA-C-----CC-T-G-GTA-G-T----C-CA--C-G-CGG-T-AAA--C-GATG-AA--TG-CT---------A-GG--T--G-T-TGCG-GG-T--A--T-------------------------------------------------------------------------------------C-GA----------------------------------------------------------------------------------------------------------------------------------------------C---C-C-C-TG--C-A-G-T-GC-C------GG--A----GT-AAA--CA-C-A-A--T--AA-GC--A----T-TCC-GCC-T-G-GG-GAG-TA---CGG-----C-C--G-C-A-A-GGT-T--GAA-ACTC-AAG---------GGAA-TTG-ACGGG-G-G-CCCG----C-A--C-A-A-GCA-GC-G--G--AG-CA-T--GT-TGT-TT-AATT-C-G-AAG-CAAC-G-CG-A-AG-A-A-CC-TT-A-CC-AGGTC-TT-G-AC-A-T-C-------------C-A--GTT-------------A-AG---C-T-CAT--A-GA-G-A-T--A-T-G-AG---G-T--C-------CC-------------------------------------T--TC-G------------------------------------------GG---------G---GAAC-T-G-A--GA---------------------------------------------------C-A-G-G-T-GGTG-CA-TGG-TT--GTC-GTC-A-GC-TC---G-TG-TC-G--TGA-GA-TGT-T-GG-G-TT-AA-GT-CCCGC-AA--------C-GAG-CGC-A-ACC-C-T-TA--TG--G-TCAG--T-T-A-C-T---AA-C-G--C--G-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------TGAA-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------G----G----C-G------------A----G---G-A--CT---------------C-T-G-A-C-GA-G--AC-T-G-CCG--G-G------------------------------------G-A---CAA----------------------------------C-T-C-G--G-A-GG-A--AGG-T--GGGG-A-CGAC-GTC--AAAT-C---ATC-A-T-G-C-C-C-CTT----AT-G--AC-C-T-GG-GC-TA-CAA-ACGTG-C--TA--CAATG---G-TGAC-T-A--C-AAA-GA-GG--------------------------------------------------------------------------------------------------A-G-C-G-A--G-ACTG-T--A---------------------------------------A-AG-T-G-----------G--A-G-CG---G----------A--TCT-C------A-A-AAAAG-TC-A-T-C-C-CAG-TTC--------GGA-T-TGTGG-GC--T-GCAA-CC-C-------------------------------------------------------------------------------------------------G-CCCAC-A-T-G-AA-G-TT-GGAGT-TG-C-TA--G-TA-AT-C-G-C----GGA-TC-A-G--C------AT--GCC-GC-G-GT-G-AAT-ACGT-T-CCCGGGCCT-TGTA----CACACCG-CCC-GTC-----A---CA--CCA-TG-GG-A--G---TTG-G-GA-GC-ACC--C-GAA------G--T-CAG-TG-A-G-G-T-AA-C-C-------------------------------------------------------------G-GA-A------------------------------------------------------------------------------------------------------GG-A--GC-C--A---GC-TGC--CG--AAG-G----T-GAG-AC-CGA------------------------TG--ACT-GGGG-TG-AAG-TCGTAACAA-GGTAG-CCGT-ATCGGAA-GGTG-CGGC-TGGATCACCTCCTTA--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
"""
if __name__ == "__main__":
main()
| gpl-2.0 |
ShassAro/ShassAro | DockerAdmin/dockerVirtualEnv/lib/python2.7/site-packages/django/test/html.py | 116 | 7962 | """
Comparing two html documents.
"""
from __future__ import unicode_literals
import re
from django.utils.encoding import force_text
from django.utils.html_parser import HTMLParser, HTMLParseError
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
WHITESPACE = re.compile('\s+')
def normalize_whitespace(string):
return WHITESPACE.sub(' ', string)
@python_2_unicode_compatible
class Element(object):
def __init__(self, name, attributes):
self.name = name
self.attributes = sorted(attributes)
self.children = []
def append(self, element):
if isinstance(element, six.string_types):
element = force_text(element)
element = normalize_whitespace(element)
if self.children:
if isinstance(self.children[-1], six.string_types):
self.children[-1] += element
self.children[-1] = normalize_whitespace(self.children[-1])
return
elif self.children:
            # remove the last child if it is only whitespace
# this can result in incorrect dom representations since
# whitespace between inline tags like <span> is significant
if isinstance(self.children[-1], six.string_types):
if self.children[-1].isspace():
self.children.pop()
if element:
self.children.append(element)
def finalize(self):
def rstrip_last_element(children):
if children:
if isinstance(children[-1], six.string_types):
children[-1] = children[-1].rstrip()
if not children[-1]:
children.pop()
children = rstrip_last_element(children)
return children
rstrip_last_element(self.children)
for i, child in enumerate(self.children):
if isinstance(child, six.string_types):
self.children[i] = child.strip()
elif hasattr(child, 'finalize'):
child.finalize()
def __eq__(self, element):
if not hasattr(element, 'name'):
return False
if hasattr(element, 'name') and self.name != element.name:
return False
if len(self.attributes) != len(element.attributes):
return False
if self.attributes != element.attributes:
# attributes without a value is same as attribute with value that
# equals the attributes name:
# <input checked> == <input checked="checked">
for i in range(len(self.attributes)):
attr, value = self.attributes[i]
other_attr, other_value = element.attributes[i]
if value is None:
value = attr
if other_value is None:
other_value = other_attr
if attr != other_attr or value != other_value:
return False
if self.children != element.children:
return False
return True
def __hash__(self):
return hash((self.name,) + tuple(a for a in self.attributes))
def __ne__(self, element):
return not self.__eq__(element)
def _count(self, element, count=True):
if not isinstance(element, six.string_types):
if self == element:
return 1
i = 0
for child in self.children:
# child is text content and element is also text content, then
# make a simple "text" in "text"
if isinstance(child, six.string_types):
if isinstance(element, six.string_types):
if count:
i += child.count(element)
elif element in child:
return 1
else:
i += child._count(element, count=count)
if not count and i:
return i
return i
def __contains__(self, element):
return self._count(element, count=False) > 0
def count(self, element):
return self._count(element, count=True)
def __getitem__(self, key):
return self.children[key]
def __str__(self):
output = '<%s' % self.name
for key, value in self.attributes:
if value:
output += ' %s="%s"' % (key, value)
else:
output += ' %s' % key
if self.children:
output += '>\n'
output += ''.join(six.text_type(c) for c in self.children)
output += '\n</%s>' % self.name
else:
output += ' />'
return output
def __repr__(self):
return six.text_type(self)
@python_2_unicode_compatible
class RootElement(Element):
def __init__(self):
super(RootElement, self).__init__(None, ())
def __str__(self):
return ''.join(six.text_type(c) for c in self.children)
class Parser(HTMLParser):
SELF_CLOSING_TAGS = ('br', 'hr', 'input', 'img', 'meta', 'spacer',
'link', 'frame', 'base', 'col')
def __init__(self):
HTMLParser.__init__(self)
self.root = RootElement()
self.open_tags = []
self.element_positions = {}
def error(self, msg):
raise HTMLParseError(msg, self.getpos())
def format_position(self, position=None, element=None):
if not position and element:
position = self.element_positions[element]
if position is None:
position = self.getpos()
if hasattr(position, 'lineno'):
position = position.lineno, position.offset
return 'Line %d, Column %d' % position
@property
def current(self):
if self.open_tags:
return self.open_tags[-1]
else:
return self.root
def handle_startendtag(self, tag, attrs):
self.handle_starttag(tag, attrs)
if tag not in self.SELF_CLOSING_TAGS:
self.handle_endtag(tag)
def handle_starttag(self, tag, attrs):
# Special case handling of 'class' attribute, so that comparisons of DOM
# instances are not sensitive to ordering of classes.
attrs = [
(name, " ".join(sorted(value.split(" "))))
if name == "class"
else (name, value)
for name, value in attrs
]
element = Element(tag, attrs)
self.current.append(element)
if tag not in self.SELF_CLOSING_TAGS:
self.open_tags.append(element)
self.element_positions[element] = self.getpos()
def handle_endtag(self, tag):
if not self.open_tags:
self.error("Unexpected end tag `%s` (%s)" % (
tag, self.format_position()))
element = self.open_tags.pop()
while element.name != tag:
if not self.open_tags:
self.error("Unexpected end tag `%s` (%s)" % (
tag, self.format_position()))
element = self.open_tags.pop()
def handle_data(self, data):
self.current.append(data)
def handle_charref(self, name):
self.current.append('&%s;' % name)
def handle_entityref(self, name):
self.current.append('&%s;' % name)
def parse_html(html):
"""
Takes a string that contains *valid* HTML and turns it into a Python object
structure that can be easily compared against other HTML on semantic
equivalence. Syntactical differences like which quotation is used on
arguments will be ignored.
"""
parser = Parser()
parser.feed(html)
parser.close()
document = parser.root
document.finalize()
# Removing ROOT element if it's not necessary
if len(document.children) == 1:
if not isinstance(document.children[0], six.string_types):
document = document.children[0]
return document
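# --- Hedged usage sketch (editor addition, not part of upstream Django) ---
# parse_html() normalizes whitespace, attribute order and "class" token order,
# so semantically equivalent fragments compare equal. Assumes a Django version
# that still ships django.utils.html_parser (the import used above).
if __name__ == '__main__':
    dom1 = parse_html('<p class="a b">hello</p>')
    dom2 = parse_html("<p class='b a'>hello </p>")
    assert dom1 == dom2          # class token order and whitespace are normalized
    assert 'hello' in dom1       # text containment via Element.__contains__
    print(dom1)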
| gpl-2.0 |
TNT-Samuel/Coding-Projects | DNS Server/Source/Lib/site-packages/urllib3/util/retry.py | 24 | 15104 | from __future__ import absolute_import
import time
import logging
from collections import namedtuple
from itertools import takewhile
import email
import re
from ..exceptions import (
ConnectTimeoutError,
MaxRetryError,
ProtocolError,
ReadTimeoutError,
ResponseError,
InvalidHeader,
)
from ..packages import six
log = logging.getLogger(__name__)
# Data structure for representing the metadata of requests that result in a retry.
RequestHistory = namedtuple('RequestHistory', ["method", "url", "error",
"status", "redirect_location"])
class Retry(object):
""" Retry configuration.
Each retry attempt will create a new Retry object with updated values, so
they can be safely reused.
Retries can be defined as a default for a pool::
retries = Retry(connect=5, read=2, redirect=5)
http = PoolManager(retries=retries)
response = http.request('GET', 'http://example.com/')
Or per-request (which overrides the default for the pool)::
response = http.request('GET', 'http://example.com/', retries=Retry(10))
Retries can be disabled by passing ``False``::
response = http.request('GET', 'http://example.com/', retries=False)
Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
retries are disabled, in which case the causing exception will be raised.
:param int total:
Total number of retries to allow. Takes precedence over other counts.
Set to ``None`` to remove this constraint and fall back on other
counts. It's a good idea to set this to some sensibly-high value to
account for unexpected edge cases and avoid infinite retry loops.
Set to ``0`` to fail on the first retry.
Set to ``False`` to disable and imply ``raise_on_redirect=False``.
:param int connect:
How many connection-related errors to retry on.
These are errors raised before the request is sent to the remote server,
which we assume has not triggered the server to process the request.
Set to ``0`` to fail on the first retry of this type.
:param int read:
How many times to retry on read errors.
These errors are raised after the request was sent to the server, so the
request may have side-effects.
Set to ``0`` to fail on the first retry of this type.
:param int redirect:
How many redirects to perform. Limit this to avoid infinite redirect
loops.
A redirect is a HTTP response with a status code 301, 302, 303, 307 or
308.
Set to ``0`` to fail on the first retry of this type.
Set to ``False`` to disable and imply ``raise_on_redirect=False``.
:param int status:
How many times to retry on bad status codes.
These are retries made on responses, where status code matches
``status_forcelist``.
Set to ``0`` to fail on the first retry of this type.
:param iterable method_whitelist:
Set of uppercased HTTP method verbs that we should retry on.
By default, we only retry on methods which are considered to be
idempotent (multiple requests with the same parameters end with the
same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
Set to a ``False`` value to retry on any verb.
:param iterable status_forcelist:
A set of integer HTTP status codes that we should force a retry on.
A retry is initiated if the request method is in ``method_whitelist``
and the response status code is in ``status_forcelist``.
By default, this is disabled with ``None``.
:param float backoff_factor:
A backoff factor to apply between attempts after the second try
(most errors are resolved immediately by a second try without a
delay). urllib3 will sleep for::
{backoff factor} * (2 ^ ({number of total retries} - 1))
seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
than :attr:`Retry.BACKOFF_MAX`.
By default, backoff is disabled (set to 0).
:param bool raise_on_redirect: Whether, if the number of redirects is
exhausted, to raise a MaxRetryError, or to return a response with a
response code in the 3xx range.
:param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
whether we should raise an exception, or return a response,
if status falls in ``status_forcelist`` range and retries have
been exhausted.
:param tuple history: The history of the request encountered during
each call to :meth:`~Retry.increment`. The list is in the order
the requests occurred. Each list item is of class :class:`RequestHistory`.
:param bool respect_retry_after_header:
Whether to respect Retry-After header on status codes defined as
:attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.
:param iterable remove_headers_on_redirect:
Sequence of headers to remove from the request when a response
indicating a redirect is returned before firing off the redirected
request.
"""
DEFAULT_METHOD_WHITELIST = frozenset([
'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE'])
RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(['Authorization'])
#: Maximum backoff time.
BACKOFF_MAX = 120
def __init__(self, total=10, connect=None, read=None, redirect=None, status=None,
method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,
backoff_factor=0, raise_on_redirect=True, raise_on_status=True,
history=None, respect_retry_after_header=True,
remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST):
self.total = total
self.connect = connect
self.read = read
self.status = status
if redirect is False or total is False:
redirect = 0
raise_on_redirect = False
self.redirect = redirect
self.status_forcelist = status_forcelist or set()
self.method_whitelist = method_whitelist
self.backoff_factor = backoff_factor
self.raise_on_redirect = raise_on_redirect
self.raise_on_status = raise_on_status
self.history = history or tuple()
self.respect_retry_after_header = respect_retry_after_header
self.remove_headers_on_redirect = remove_headers_on_redirect
def new(self, **kw):
params = dict(
total=self.total,
connect=self.connect, read=self.read, redirect=self.redirect, status=self.status,
method_whitelist=self.method_whitelist,
status_forcelist=self.status_forcelist,
backoff_factor=self.backoff_factor,
raise_on_redirect=self.raise_on_redirect,
raise_on_status=self.raise_on_status,
history=self.history,
remove_headers_on_redirect=self.remove_headers_on_redirect
)
params.update(kw)
return type(self)(**params)
@classmethod
def from_int(cls, retries, redirect=True, default=None):
""" Backwards-compatibility for the old retries format."""
if retries is None:
retries = default if default is not None else cls.DEFAULT
if isinstance(retries, Retry):
return retries
        redirect = bool(redirect) and redirect or None
new_retries = cls(retries, redirect=redirect)
log.debug("Converted retries value: %r -> %r", retries, new_retries)
return new_retries
def get_backoff_time(self):
""" Formula for computing the current backoff
:rtype: float
"""
# We want to consider only the last consecutive errors sequence (Ignore redirects).
consecutive_errors_len = len(list(takewhile(lambda x: x.redirect_location is None,
reversed(self.history))))
if consecutive_errors_len <= 1:
return 0
backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
return min(self.BACKOFF_MAX, backoff_value)
def parse_retry_after(self, retry_after):
# Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
if re.match(r"^\s*[0-9]+\s*$", retry_after):
seconds = int(retry_after)
else:
retry_date_tuple = email.utils.parsedate(retry_after)
if retry_date_tuple is None:
raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
retry_date = time.mktime(retry_date_tuple)
seconds = retry_date - time.time()
if seconds < 0:
seconds = 0
return seconds
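    # Illustrative examples for the two Retry-After forms handled above
    # (delta-seconds and HTTP-date); the date value is made up:
    #   parse_retry_after("120")                           -> 120
    #   parse_retry_after("Fri, 31 Dec 1999 23:59:59 GMT") -> seconds until that
    #                                                         date, clamped to 0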
def get_retry_after(self, response):
""" Get the value of Retry-After in seconds. """
retry_after = response.getheader("Retry-After")
if retry_after is None:
return None
return self.parse_retry_after(retry_after)
def sleep_for_retry(self, response=None):
retry_after = self.get_retry_after(response)
if retry_after:
time.sleep(retry_after)
return True
return False
def _sleep_backoff(self):
backoff = self.get_backoff_time()
if backoff <= 0:
return
time.sleep(backoff)
def sleep(self, response=None):
""" Sleep between retry attempts.
This method will respect a server's ``Retry-After`` response header
and sleep the duration of the time requested. If that is not present, it
will use an exponential backoff. By default, the backoff factor is 0 and
this method will return immediately.
"""
if response:
slept = self.sleep_for_retry(response)
if slept:
return
self._sleep_backoff()
def _is_connection_error(self, err):
""" Errors when we're fairly sure that the server did not receive the
request, so it should be safe to retry.
"""
return isinstance(err, ConnectTimeoutError)
def _is_read_error(self, err):
""" Errors that occur after the request has been started, so we should
assume that the server began processing it.
"""
return isinstance(err, (ReadTimeoutError, ProtocolError))
def _is_method_retryable(self, method):
""" Checks if a given HTTP method should be retried upon, depending if
it is included on the method whitelist.
"""
if self.method_whitelist and method.upper() not in self.method_whitelist:
return False
return True
def is_retry(self, method, status_code, has_retry_after=False):
""" Is this method/status code retryable? (Based on whitelists and control
variables such as the number of total retries to allow, whether to
respect the Retry-After header, whether this header is present, and
whether the returned status code is on the list of status codes to
be retried upon on the presence of the aforementioned header)
"""
if not self._is_method_retryable(method):
return False
if self.status_forcelist and status_code in self.status_forcelist:
return True
return (self.total and self.respect_retry_after_header and
has_retry_after and (status_code in self.RETRY_AFTER_STATUS_CODES))
def is_exhausted(self):
""" Are we out of retries? """
retry_counts = (self.total, self.connect, self.read, self.redirect, self.status)
retry_counts = list(filter(None, retry_counts))
if not retry_counts:
return False
return min(retry_counts) < 0
def increment(self, method=None, url=None, response=None, error=None,
_pool=None, _stacktrace=None):
""" Return a new Retry object with incremented retry counters.
:param response: A response object, or None, if the server did not
return a response.
:type response: :class:`~urllib3.response.HTTPResponse`
:param Exception error: An error encountered during the request, or
None if the response was received successfully.
:return: A new ``Retry`` object.
"""
if self.total is False and error:
# Disabled, indicate to re-raise the error.
raise six.reraise(type(error), error, _stacktrace)
total = self.total
if total is not None:
total -= 1
connect = self.connect
read = self.read
redirect = self.redirect
status_count = self.status
cause = 'unknown'
status = None
redirect_location = None
if error and self._is_connection_error(error):
# Connect retry?
if connect is False:
raise six.reraise(type(error), error, _stacktrace)
elif connect is not None:
connect -= 1
elif error and self._is_read_error(error):
# Read retry?
if read is False or not self._is_method_retryable(method):
raise six.reraise(type(error), error, _stacktrace)
elif read is not None:
read -= 1
elif response and response.get_redirect_location():
# Redirect retry?
if redirect is not None:
redirect -= 1
cause = 'too many redirects'
redirect_location = response.get_redirect_location()
status = response.status
else:
# Incrementing because of a server error like a 500 in
            # status_forcelist and the given method is in the whitelist
cause = ResponseError.GENERIC_ERROR
if response and response.status:
if status_count is not None:
status_count -= 1
cause = ResponseError.SPECIFIC_ERROR.format(
status_code=response.status)
status = response.status
history = self.history + (RequestHistory(method, url, error, status, redirect_location),)
new_retry = self.new(
total=total,
connect=connect, read=read, redirect=redirect, status=status_count,
history=history)
if new_retry.is_exhausted():
raise MaxRetryError(_pool, url, error or ResponseError(cause))
log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)
return new_retry
def __repr__(self):
return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
'read={self.read}, redirect={self.redirect}, status={self.status})').format(
cls=type(self), self=self)
# For backwards compatibility (equivalent to pre-v1.9):
Retry.DEFAULT = Retry(3)
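# --- Hedged usage sketch (editor addition, not part of upstream urllib3) ---
# increment() never mutates; each call returns a new Retry with updated
# counters and history, which get_backoff_time() then inspects:
#
#   retry = Retry(total=5, backoff_factor=0.2, status_forcelist=[500, 502, 503])
#   retry = retry.increment(method='GET', url='/', error=ConnectTimeoutError())
#   retry = retry.increment(method='GET', url='/', error=ConnectTimeoutError())
#   retry.get_backoff_time()   # 0.2 * 2 ** (2 - 1) = 0.4 seconds
#
# In normal use the object is passed to a pool manager / request call rather
# than driven by hand as above.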
| gpl-3.0 |
olgabrani/synnefo | snf-cyclades-app/synnefo/logic/management/commands/subnet-create.py | 10 | 5234 | # Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from optparse import make_option
from django.core.management.base import CommandError
from synnefo.management import common
from snf_django.management.commands import SynnefoCommand
from snf_django.management.utils import parse_bool
from synnefo.management import pprint
from synnefo.logic import subnets
HELP_MSG = """
Create a new subnet without authenticating the user. The limit of one
IPv4/IPv6 subnet per network still applies. Mandatory fields are CIDR and the
Network ID.
"""
class Command(SynnefoCommand):
help = "Create a new Subnet." + HELP_MSG
option_list = SynnefoCommand.option_list + (
make_option("--network", dest="network_id",
help="Specify the Network to attach the subnet. To get the"
" networks of a user, use snf-manage network-list"),
make_option("--cidr", dest="cidr",
help="The CIDR of the subnet, e.g., 192.168.42.0/24"),
make_option("--allocation-pool", dest="allocation_pools",
action="append",
help="IP allocation pools to be used for assigning IPs to"
" VMs. Can be used multiple times. Syntax: \n"
"192.168.42.220,192.168.42.240. Starting IP must proceed "
"ending IP.20,192.168.42.240. Starting IP must proceed "
"ending IP. If no allocation pools are given, the whole "
"subnet range is used, excluding the gateway IP, the "
"broadcast address and the network address"),
make_option("--name", dest="name",
help="An arbitrary string for naming the subnet."),
make_option("--ip-version", dest="ipversion", choices=["4", "6"],
metavar="4|6",
help="IP version of the CIDR. The value must be in sync"
" with the CIDR. Default value: 4"),
make_option("--gateway", dest="gateway",
help="An IP to use as a gateway for the subnet."
" The IP must be inside the CIDR range and cannot be the"
" subnet or broadcast IP. If no value is specified, a"
" gateway will not be set."),
make_option("--dhcp", dest="dhcp", default="True",
choices=["True", "False"], metavar="True|False",
help="Value for DHCP/SLAAC. True by default."),
make_option("--dns", dest="dns",
help="DNS nameservers to be used by the VMs in the subnet."
" For the time being, this option isn't supported."),
make_option("--host-routes", dest="host_routes",
help="Host routes to be used for advanced routing"
"settings. For the time being, this option isn't"
" supported.")
)
@common.convert_api_faults
def handle(self, *args, **options):
if args:
raise CommandError("Command doesn't accept any arguments")
network_id = options["network_id"]
cidr = options["cidr"]
if not network_id:
raise CommandError("network is mandatory")
if not cidr:
raise CommandError("cidr is mandatory")
user_id = common.get_resource("network", network_id).userid
name = options["name"] or ""
allocation_pools = options["allocation_pools"]
ipversion = options["ipversion"] or 4
ipversion = int(ipversion)
gateway = options["gateway"]
dhcp = parse_bool(options["dhcp"])
dns = options["dns"]
host_routes = options["host_routes"]
alloc = None
if allocation_pools is not None:
alloc = subnets.parse_allocation_pools(allocation_pools)
alloc.sort()
sub = subnets.create_subnet(name=name,
network_id=network_id,
cidr=cidr,
allocation_pools=alloc,
gateway=gateway,
ipversion=ipversion,
dhcp=dhcp,
slaac=dhcp,
dns_nameservers=dns,
host_routes=host_routes,
user_id=user_id)
pprint.pprint_subnet_in_db(sub, stdout=self.stdout)
self.stdout.write("\n\n")
pprint.pprint_ippool(sub, stdout=self.stdout)
| gpl-3.0 |
allthroughthenight/aces | web2py/gluon/contrib/qdb.py | 43 | 32143 | #!/usr/bin/env python
# coding:utf-8
"Queues(Pipe)-based independent remote client-server Python Debugger"
__author__ = "Mariano Reingart ([email protected])"
__copyright__ = "Copyright (C) 2011 Mariano Reingart"
__license__ = "LGPL 3.0"
__version__ = "1.01b"
# remote debugger queue-based (jsonrpc-like interface):
# - bidirectional communication (request - response calls in both ways)
# - request with id == null is a notification (do not send a response)
# - request with a value for id is a normal call, wait response
# based on idle, inspired by pythonwin implementation, taken many code from pdb
import bdb
import inspect
import linecache
import os
import sys
import traceback
import cmd
import pydoc
import threading
class Qdb(bdb.Bdb):
"Qdb Debugger Backend"
def __init__(self, pipe, redirect_stdio=True, allow_interruptions=False,
skip=[__name__]):
kwargs = {}
if sys.version_info > (2, 7):
kwargs['skip'] = skip
bdb.Bdb.__init__(self, **kwargs)
self.frame = None
self.i = 1 # sequential RPC call id
self.waiting = False
self.pipe = pipe # for communication
self._wait_for_mainpyfile = False
self._wait_for_breakpoint = False
self.mainpyfile = ""
        self._lineno = None  # last listed line number
# replace system standard input and output (send them thru the pipe)
if redirect_stdio:
sys.stdin = self
sys.stdout = self
sys.stderr = self
if allow_interruptions:
# fake breakpoint to prevent removing trace_dispatch on set_continue
self.breaks[None] = []
self.allow_interruptions = allow_interruptions
self.burst = 0 # do not send notifications ("burst" mode)
self.params = {} # optional parameters for interaction
def pull_actions(self):
# receive a remote procedure call from the frontend:
# returns True if action processed
# None when 'run' notification is received (see 'startup')
request = self.pipe.recv()
if request.get("method") == 'run':
return None
response = {'version': '1.1', 'id': request.get('id'),
'result': None,
'error': None}
try:
# dispatch message (JSON RPC like)
method = getattr(self, request['method'])
response['result'] = method.__call__(*request['args'],
**request.get('kwargs', {}))
except Exception, e:
response['error'] = {'code': 0, 'message': str(e)}
# send the result for normal method calls, not for notifications
if request.get('id'):
self.pipe.send(response)
return True
# Override Bdb methods
def trace_dispatch(self, frame, event, arg):
# check for non-interaction rpc (set_breakpoint, interrupt)
while self.allow_interruptions and self.pipe.poll():
self.pull_actions()
# process the frame (see Bdb.trace_dispatch)
if self.quitting:
return # None
if event == 'line':
return self.dispatch_line(frame)
if event == 'call':
return self.dispatch_call(frame, arg)
if event == 'return':
return self.dispatch_return(frame, arg)
if event == 'exception':
return self.dispatch_exception(frame, arg)
return self.trace_dispatch
def user_call(self, frame, argument_list):
"""This method is called when there is the remote possibility
that we ever need to stop in this function."""
if self._wait_for_mainpyfile or self._wait_for_breakpoint:
return
if self.stop_here(frame):
self.interaction(frame, None)
def user_line(self, frame):
"""This function is called when we stop or break at this line."""
if self._wait_for_mainpyfile:
if (not self.canonic(frame.f_code.co_filename).startswith(self.mainpyfile)
or frame.f_lineno <= 0):
return
self._wait_for_mainpyfile = 0
if self._wait_for_breakpoint:
if not self.break_here(frame):
return
self._wait_for_breakpoint = 0
self.interaction(frame)
def user_exception(self, frame, info):
"""This function is called if an exception occurs,
but only if we are to stop at or just below this level."""
if self._wait_for_mainpyfile or self._wait_for_breakpoint:
return
extype, exvalue, trace = info
        # pre-process stack trace as it isn't pickleable (cannot be sent pure)
msg = ''.join(traceback.format_exception(extype, exvalue, trace))
trace = traceback.extract_tb(trace)
title = traceback.format_exception_only(extype, exvalue)[0]
# send an Exception notification
msg = {'method': 'exception',
'args': (title, extype.__name__, exvalue, trace, msg),
'id': None}
self.pipe.send(msg)
self.interaction(frame, info)
def run(self, code, interp=None, *args, **kwargs):
try:
return bdb.Bdb.run(self, code, *args, **kwargs)
finally:
pass
def runcall(self, function, interp=None, *args, **kwargs):
try:
self.interp = interp
return bdb.Bdb.runcall(self, function, *args, **kwargs)
finally:
pass
def _runscript(self, filename):
# The script has to run in __main__ namespace (clear it)
import __main__
import imp
__main__.__dict__.clear()
__main__.__dict__.update({"__name__": "__main__",
"__file__": filename,
"__builtins__": __builtins__,
"imp": imp, # need for run
})
# avoid stopping before we reach the main script
self._wait_for_mainpyfile = 1
self.mainpyfile = self.canonic(filename)
self._user_requested_quit = 0
statement = 'imp.load_source("__main__", "%s")' % filename
# notify and wait frontend to set initial params and breakpoints
self.pipe.send({'method': 'startup', 'args': (__version__, )})
while self.pull_actions() is not None:
pass
self.run(statement)
# General interaction function
def interaction(self, frame, info=None):
        # cache frame locals to ensure that modifications are not overwritten
self.frame_locals = frame and frame.f_locals or {}
# extract current filename and line number
code, lineno = frame.f_code, frame.f_lineno
filename = code.co_filename
basename = os.path.basename(filename)
message = "%s:%s" % (basename, lineno)
if code.co_name != "?":
message = "%s: %s()" % (message, code.co_name)
# wait user events
self.waiting = True
self.frame = frame
try:
while self.waiting:
# sync_source_line()
if frame and filename[:1] + filename[-1:] != "<>" and os.path.exists(filename):
line = linecache.getline(filename, self.frame.f_lineno,
self.frame.f_globals)
else:
line = ""
# send the notification (debug event) - DOESN'T WAIT RESPONSE
self.burst -= 1
if self.burst < 0:
kwargs = {}
if self.params.get('call_stack'):
kwargs['call_stack'] = self.do_where()
if self.params.get('environment'):
kwargs['environment'] = self.do_environment()
self.pipe.send({'method': 'interaction', 'id': None,
'args': (filename, self.frame.f_lineno, line),
'kwargs': kwargs})
self.pull_actions()
finally:
self.waiting = False
self.frame = None
def do_debug(self, mainpyfile=None, wait_breakpoint=1):
self.reset()
if not wait_breakpoint or mainpyfile:
self._wait_for_mainpyfile = 1
if not mainpyfile:
frame = sys._getframe().f_back
mainpyfile = frame.f_code.co_filename
self.mainpyfile = self.canonic(mainpyfile)
self._wait_for_breakpoint = wait_breakpoint
sys.settrace(self.trace_dispatch)
def set_trace(self, frame=None):
        # start debugger interaction immediately
if frame is None:
frame = sys._getframe().f_back
self._wait_for_mainpyfile = frame.f_code.co_filename
self._wait_for_breakpoint = 0
bdb.Bdb.set_trace(self, frame)
# Command definitions, called by interaction()
def do_continue(self):
self.set_continue()
self.waiting = False
def do_step(self):
self.set_step()
self.waiting = False
def do_return(self):
self.set_return(self.frame)
self.waiting = False
def do_next(self):
self.set_next(self.frame)
self.waiting = False
def interrupt(self):
self.set_step()
def do_quit(self):
self.set_quit()
self.waiting = False
def do_jump(self, lineno):
arg = int(lineno)
try:
self.frame.f_lineno = arg
return arg
except ValueError, e:
print '*** Jump failed:', e
return False
def do_list(self, arg):
last = None
if arg:
if isinstance(arg, tuple):
first, last = arg
else:
first = arg
elif not self._lineno:
first = max(1, self.frame.f_lineno - 5)
else:
first = self._lineno + 1
if last is None:
last = first + 10
filename = self.frame.f_code.co_filename
breaklist = self.get_file_breaks(filename)
lines = []
for lineno in range(first, last + 1):
line = linecache.getline(filename, lineno,
self.frame.f_globals)
if not line:
                lines.append((filename, lineno, '', '', "<EOF>\n"))
break
else:
breakpoint = "B" if lineno in breaklist else ""
current = "->" if self.frame.f_lineno == lineno else ""
lines.append((filename, lineno, breakpoint, current, line))
self._lineno = lineno
return lines
def do_read(self, filename):
return open(filename, "Ur").read()
def do_set_breakpoint(self, filename, lineno, temporary=0, cond=None):
return self.set_break(filename, int(lineno), temporary, cond)
def do_list_breakpoint(self):
breaks = []
if self.breaks: # There's at least one
for bp in bdb.Breakpoint.bpbynumber:
if bp:
breaks.append((bp.number, bp.file, bp.line,
bp.temporary, bp.enabled, bp.hits, bp.cond, ))
return breaks
def do_clear_breakpoint(self, filename, lineno):
self.clear_break(filename, lineno)
def do_clear_file_breakpoints(self, filename):
self.clear_all_file_breaks(filename)
def do_clear(self, arg):
# required by BDB to remove temp breakpoints!
err = self.clear_bpbynumber(arg)
if err:
print '*** DO_CLEAR failed', err
def do_eval(self, arg, safe=True):
ret = eval(arg, self.frame.f_globals,
self.frame_locals)
if safe:
ret = pydoc.cram(repr(ret), 255)
return ret
def do_exec(self, arg):
locals = self.frame_locals
globals = self.frame.f_globals
code = compile(arg + '\n', '<stdin>', 'single')
save_displayhook = sys.displayhook
self.displayhook_value = None
try:
sys.displayhook = self.displayhook
exec code in globals, locals
finally:
sys.displayhook = save_displayhook
return self.displayhook_value
def do_where(self):
"print_stack_trace"
stack, curindex = self.get_stack(self.frame, None)
lines = []
for frame, lineno in stack:
filename = frame.f_code.co_filename
line = linecache.getline(filename, lineno)
lines.append((filename, lineno, "", "", line, ))
return lines
def do_environment(self):
"return current frame local and global environment"
env = {'locals': {}, 'globals': {}}
# converts the frame global and locals to a short text representation:
if self.frame:
for name, value in self.frame_locals.items():
env['locals'][name] = pydoc.cram(repr(
value), 255), repr(type(value))
for name, value in self.frame.f_globals.items():
env['globals'][name] = pydoc.cram(repr(
value), 20), repr(type(value))
return env
def get_autocomplete_list(self, expression):
"Return list of auto-completion options for expression"
try:
obj = self.do_eval(expression)
except:
return []
else:
return dir(obj)
def get_call_tip(self, expression):
"Return list of auto-completion options for expression"
try:
obj = self.do_eval(expression)
except Exception, e:
return ('', '', str(e))
else:
name = ''
try:
name = obj.__name__
except AttributeError:
pass
argspec = ''
drop_self = 0
f = None
try:
if inspect.isbuiltin(obj):
pass
elif inspect.ismethod(obj):
# Get the function from the object
f = obj.im_func
drop_self = 1
elif inspect.isclass(obj):
# Get the __init__ method function for the class.
if hasattr(obj, '__init__'):
f = obj.__init__.im_func
else:
for base in object.__bases__:
if hasattr(base, '__init__'):
f = base.__init__.im_func
break
if f is not None:
drop_self = 1
elif callable(obj):
# use the obj as a function by default
f = obj
# Get the __call__ method instead.
f = obj.__call__.im_func
drop_self = 0
except AttributeError:
pass
if f:
argspec = apply(inspect.formatargspec, inspect.getargspec(f))
doc = ''
if callable(obj):
try:
doc = inspect.getdoc(obj)
except:
pass
return (name, argspec[1:-1], doc.strip())
def set_burst(self, val):
"Set burst mode -multiple command count- (shut up notifications)"
self.burst = val
def set_params(self, params):
"Set parameters for interaction"
self.params.update(params)
def displayhook(self, obj):
"""Custom displayhook for the do_exec which prevents
assignment of the _ variable in the builtins.
"""
self.displayhook_value = repr(obj)
def reset(self):
bdb.Bdb.reset(self)
self.waiting = False
self.frame = None
def post_mortem(self, t=None):
# handling the default
if t is None:
# sys.exc_info() returns (type, value, traceback) if an exception is
# being handled, otherwise it returns None
t = sys.exc_info()[2]
if t is None:
raise ValueError("A valid traceback must be passed if no "
"exception is being handled")
self.reset()
# get last frame:
while t is not None:
frame = t.tb_frame
t = t.tb_next
code, lineno = frame.f_code, frame.f_lineno
filename = code.co_filename
line = linecache.getline(filename, lineno)
#(filename, lineno, "", current, line, )}
self.interaction(frame)
# console file-like object emulation
def readline(self):
"Replacement for stdin.readline()"
msg = {'method': 'readline', 'args': (), 'id': self.i}
self.pipe.send(msg)
msg = self.pipe.recv()
self.i += 1
return msg['result']
def readlines(self):
"Replacement for stdin.readlines()"
lines = []
while lines[-1:] != ['\n']:
lines.append(self.readline())
return lines
def write(self, text):
"Replacement for stdout.write()"
msg = {'method': 'write', 'args': (text, ), 'id': None}
self.pipe.send(msg)
def writelines(self, l):
map(self.write, l)
def flush(self):
pass
def isatty(self):
return 0
class QueuePipe(object):
"Simulated pipe for threads (using two queues)"
def __init__(self, name, in_queue, out_queue):
self.__name = name
self.in_queue = in_queue
self.out_queue = out_queue
def send(self, data):
self.out_queue.put(data, block=True)
def recv(self, count=None, timeout=None):
data = self.in_queue.get(block=True, timeout=timeout)
return data
def poll(self, timeout=None):
return not self.in_queue.empty()
def close(self):
pass
class RPCError(RuntimeError):
"Remote Error (not user exception)"
pass
class Frontend(object):
"Qdb generic Frontend interface"
def __init__(self, pipe):
self.i = 1
self.pipe = pipe
self.notifies = []
self.read_lock = threading.RLock()
self.write_lock = threading.RLock()
def recv(self):
self.read_lock.acquire()
try:
return self.pipe.recv()
finally:
self.read_lock.release()
def send(self, data):
self.write_lock.acquire()
try:
return self.pipe.send(data)
finally:
self.write_lock.release()
def startup(self):
self.send({'method': 'run', 'args': (), 'id': None})
def interaction(self, filename, lineno, line, *kwargs):
raise NotImplementedError
def exception(self, title, extype, exvalue, trace, request):
"Show a user_exception"
raise NotImplementedError
def write(self, text):
"Console output (print)"
raise NotImplementedError
def readline(self, text):
"Console input/rawinput"
raise NotImplementedError
def run(self):
"Main method dispatcher (infinite loop)"
if self.pipe:
if not self.notifies:
# wait for a message...
request = self.recv()
else:
            # process an asynchronous notification received earlier
request = self.notifies.pop(0)
return self.process_message(request)
def process_message(self, request):
if request:
result = None
if request.get("error"):
# it is not supposed to get an error here
# it should be raised by the method call
                raise RPCError(request['error']['message'])
elif request.get('method') == 'interaction':
self.interaction(*request.get("args"), **request.get("kwargs"))
elif request.get('method') == 'startup':
self.startup()
elif request.get('method') == 'exception':
self.exception(*request['args'])
elif request.get('method') == 'write':
self.write(*request.get("args"))
elif request.get('method') == 'readline':
result = self.readline()
if result:
response = {'version': '1.1', 'id': request.get('id'),
'result': result,
'error': None}
self.send(response)
return True
def call(self, method, *args):
"Actually call the remote method (inside the thread)"
req = {'method': method, 'args': args, 'id': self.i}
self.send(req)
self.i += 1 # increment the id
while 1:
# wait until command acknowledge (response id match the request)
res = self.recv()
if 'id' not in res or not res['id']:
# nested notification received (i.e. write)! process it!
self.process_message(res)
elif 'result' not in res:
# nested request received (i.e. readline)! process it!
self.process_message(res)
elif long(req['id']) != long(res['id']):
print "DEBUGGER wrong packet received: expecting id", req[
'id'], res['id']
# protocol state is unknown
elif 'error' in res and res['error']:
raise RPCError(res['error']['message'])
else:
return res['result']
def do_step(self, arg=None):
"Execute the current line, stop at the first possible occasion"
self.call('do_step')
def do_next(self, arg=None):
"Execute the current line, do not stop at function calls"
self.call('do_next')
def do_continue(self, arg=None):
"Continue execution, only stop when a breakpoint is encountered."
self.call('do_continue')
def do_return(self, arg=None):
"Continue execution until the current function returns"
self.call('do_return')
def do_jump(self, arg):
"Set the next line that will be executed."
res = self.call('do_jump', arg)
print res
def do_where(self, arg=None):
"Print a stack trace, with the most recent frame at the bottom."
return self.call('do_where')
def do_quit(self, arg=None):
"Quit from the debugger. The program being executed is aborted."
self.call('do_quit')
def do_eval(self, expr):
"Inspect the value of the expression"
return self.call('do_eval', expr)
def do_environment(self):
"List all the locals and globals variables (string representation)"
return self.call('do_environment')
def do_list(self, arg=None):
"List source code for the current file"
return self.call('do_list', arg)
def do_read(self, filename):
"Read and send a local filename"
return self.call('do_read', filename)
def do_set_breakpoint(self, filename, lineno, temporary=0, cond=None):
"Set a breakpoint at filename:breakpoint"
self.call('do_set_breakpoint', filename, lineno, temporary, cond)
def do_clear_breakpoint(self, filename, lineno):
"Remove a breakpoint at filename:breakpoint"
self.call('do_clear_breakpoint', filename, lineno)
def do_clear_file_breakpoints(self, filename):
"Remove all breakpoints at filename"
        self.call('do_clear_file_breakpoints', filename)
def do_list_breakpoint(self):
"List all breakpoints"
return self.call('do_list_breakpoint')
def do_exec(self, statement):
return self.call('do_exec', statement)
def get_autocomplete_list(self, expression):
return self.call('get_autocomplete_list', expression)
def get_call_tip(self, expression):
return self.call('get_call_tip', expression)
def interrupt(self):
"Immediately stop at the first possible occasion (outside interaction)"
# this is a notification!, do not expect a response
req = {'method': 'interrupt', 'args': ()}
self.send(req)
def set_burst(self, value):
req = {'method': 'set_burst', 'args': (value, )}
self.send(req)
def set_params(self, params):
req = {'method': 'set_params', 'args': (params, )}
self.send(req)
class Cli(Frontend, cmd.Cmd):
"Qdb Front-end command line interface"
def __init__(self, pipe, completekey='tab', stdin=None, stdout=None, skip=None):
cmd.Cmd.__init__(self, completekey, stdin, stdout)
Frontend.__init__(self, pipe)
# redefine Frontend methods:
def run(self):
while 1:
try:
Frontend.run(self)
except KeyboardInterrupt:
print "Interupting..."
self.interrupt()
def interaction(self, filename, lineno, line):
print "> %s(%d)\n-> %s" % (filename, lineno, line),
self.filename = filename
self.cmdloop()
def exception(self, title, extype, exvalue, trace, request):
print "=" * 80
print "Exception", title
print request
print "-" * 80
def write(self, text):
print text,
def readline(self):
return raw_input()
def postcmd(self, stop, line):
return not line.startswith("h") # stop
do_h = cmd.Cmd.do_help
do_s = Frontend.do_step
do_n = Frontend.do_next
do_c = Frontend.do_continue
do_r = Frontend.do_return
do_j = Frontend.do_jump
do_q = Frontend.do_quit
def do_eval(self, args):
"Inspect the value of the expression"
print Frontend.do_eval(self, args)
def do_list(self, args):
"List source code for the current file"
lines = Frontend.do_list(self, eval(args, {}, {}) if args else None)
self.print_lines(lines)
def do_where(self, args):
"Print a stack trace, with the most recent frame at the bottom."
lines = Frontend.do_where(self)
self.print_lines(lines)
def do_environment(self, args=None):
env = Frontend.do_environment(self)
for key in env:
print "=" * 78
print key.capitalize()
print "-" * 78
for name, value in env[key].items():
print "%-12s = %s" % (name, value)
def do_list_breakpoint(self, arg=None):
"List all breakpoints"
breaks = Frontend.do_list_breakpoint(self)
print "Num File Line Temp Enab Hits Cond"
for bp in breaks:
print '%-4d%-30s%4d %4s %4s %4d %s' % bp
print
def do_set_breakpoint(self, arg):
"Set a breakpoint at filename:breakpoint"
if arg:
if ':' in arg:
args = arg.split(":")
else:
args = (self.filename, arg)
Frontend.do_set_breakpoint(self, *args)
else:
self.do_list_breakpoint()
do_b = do_set_breakpoint
do_l = do_list
do_p = do_eval
do_w = do_where
do_e = do_environment
def default(self, line):
"Default command"
if line[:1] == '!':
print self.do_exec(line[1:])
else:
print "*** Unknown command: ", line
def print_lines(self, lines):
for filename, lineno, bp, current, source in lines:
print "%s:%4d%s%s\t%s" % (filename, lineno, bp, current, source),
print
def test():
def f(pipe):
print "creating debugger"
qdb = Qdb(pipe=pipe, redirect_stdio=False)
print "set trace"
my_var = "Mariano!"
qdb.set_trace()
print "hello world!"
print "good by!"
saraza
if '--process' in sys.argv:
from multiprocessing import Process, Pipe
pipe, child_conn = Pipe()
p = Process(target=f, args=(child_conn,))
else:
from threading import Thread
from Queue import Queue
parent_queue, child_queue = Queue(), Queue()
front_conn = QueuePipe("parent", parent_queue, child_queue)
child_conn = QueuePipe("child", child_queue, parent_queue)
p = Thread(target=f, args=(child_conn,))
p.start()
import time
class Test(Frontend):
def interaction(self, *args):
print "interaction!", args
def exception(self, *args):
print "exception", args
#raise RuntimeError("exception %s" % repr(args))
qdb = Test(front_conn)
time.sleep(5)
while 1:
print "running..."
Frontend.run(qdb)
time.sleep(1)
print "do_next"
qdb.do_next()
p.join()
def connect(host="localhost", port=6000, authkey='secret password'):
"Connect to a running debugger backend"
address = (host, port)
from multiprocessing.connection import Client
print "qdb debugger fronted: waiting for connection to", address
conn = Client(address, authkey=authkey)
try:
Cli(conn).run()
except EOFError:
pass
finally:
conn.close()
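# Hedged workflow sketch (editor addition): together with the __main__ block at
# the end of this file, a typical remote-debugging session looks like
#   terminal 1:  python qdb.py myscript.py   # backend, runs main() below
#   terminal 2:  python qdb.py               # frontend, runs connect() above
# where myscript.py is a hypothetical script to debug.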
def main(host='localhost', port=6000, authkey='secret password'):
"Debug a script and accept a remote frontend"
if not sys.argv[1:] or sys.argv[1] in ("--help", "-h"):
print "usage: pdb.py scriptfile [arg] ..."
sys.exit(2)
mainpyfile = sys.argv[1] # Get script filename
if not os.path.exists(mainpyfile):
print 'Error:', mainpyfile, 'does not exist'
sys.exit(1)
del sys.argv[0]  # Hide "qdb.py" from argument list
# Replace qdb's dir with script's dir in front of module search path.
sys.path[0] = os.path.dirname(mainpyfile)
from multiprocessing.connection import Listener
address = (host, port) # family is deduced to be 'AF_INET'
listener = Listener(address, authkey=authkey)
print "qdb debugger backend: waiting for connection at", address
conn = listener.accept()
print 'qdb debugger backend: connected to', listener.last_accepted
# create the backend
qdb = Qdb(conn, redirect_stdio=True, allow_interruptions=True)
try:
print "running", mainpyfile
qdb._runscript(mainpyfile)
print "The program finished"
except SystemExit:
# In most cases SystemExit does not warrant a post-mortem session.
print "The program exited via sys.exit(). Exit status: ",
print sys.exc_info()[1]
raise
except:
raise
conn.close()
listener.close()
qdb = None
def set_trace(host='localhost', port=6000, authkey='secret password'):
"Simplified interface to debug running programs"
global qdb, listener, conn
from multiprocessing.connection import Listener
# only create it if not currently instantiated
if not qdb:
address = (host, port) # family is deduced to be 'AF_INET'
listener = Listener(address, authkey=authkey)
conn = listener.accept()
# create the backend
qdb = Qdb(conn)
# start debugger backend:
qdb.set_trace()
def quit():
"Remove trace and quit"
global qdb, listener, conn
if qdb:
sys.settrace(None)
qdb = None
if conn:
conn.close()
conn = None
if listener:
listener.close()
listener = None
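# Workflow sketch for embedding the backend in a running program; assumes a
# frontend attaches from another process (e.g. via connect()), and uses the
# default port/authkey defined above:
#
#   import qdb
#   qdb.set_trace()   # blocks until a frontend connects, then starts tracing
#   ...               # from here on the program runs under remote control
#   qdb.quit()        # remove the trace function and close the connection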
if __name__ == '__main__':
# When invoked as main program:
if '--test' in sys.argv:
test()
# Check environment for configuration parameters:
kwargs = {}
for param in 'host', 'port', 'authkey':
if 'QDB_%s' % param.upper() in os.environ:
kwargs[param] = os.environ['QDB_%s' % param.upper()]
if not sys.argv[1:]:
# connect to a remote debugger
connect(**kwargs)
else:
# start the debugger on a script
# reimport as global __main__ namespace is destroyed
import qdb
qdb.main(**kwargs)
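# Typical invocations (illustrative; QDB_HOST/QDB_PORT/QDB_AUTHKEY come from
# the environment scan above):
#   python qdb.py script.py arg1     # debug script.py, wait for a frontend
#   python qdb.py                    # no script given: attach as a frontend
#   QDB_HOST=0.0.0.0 python qdb.py script.py   # override the listen address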
| gpl-3.0 |
evanma92/routeh | flask/lib/python2.7/site-packages/pip/_vendor/html5lib/sanitizer.py | 805 | 16428 | from __future__ import absolute_import, division, unicode_literals
import re
from xml.sax.saxutils import escape, unescape
from .tokenizer import HTMLTokenizer
from .constants import tokenTypes
class HTMLSanitizerMixin(object):
""" sanitization of XHTML+MathML+SVG and of inline style attributes."""
acceptable_elements = ['a', 'abbr', 'acronym', 'address', 'area',
'article', 'aside', 'audio', 'b', 'big', 'blockquote', 'br', 'button',
'canvas', 'caption', 'center', 'cite', 'code', 'col', 'colgroup',
'command', 'datagrid', 'datalist', 'dd', 'del', 'details', 'dfn',
'dialog', 'dir', 'div', 'dl', 'dt', 'em', 'event-source', 'fieldset',
'figcaption', 'figure', 'footer', 'font', 'form', 'header', 'h1',
'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img', 'input', 'ins',
'keygen', 'kbd', 'label', 'legend', 'li', 'm', 'map', 'menu', 'meter',
'multicol', 'nav', 'nextid', 'ol', 'output', 'optgroup', 'option',
'p', 'pre', 'progress', 'q', 's', 'samp', 'section', 'select',
'small', 'sound', 'source', 'spacer', 'span', 'strike', 'strong',
'sub', 'sup', 'table', 'tbody', 'td', 'textarea', 'time', 'tfoot',
'th', 'thead', 'tr', 'tt', 'u', 'ul', 'var', 'video']
mathml_elements = ['maction', 'math', 'merror', 'mfrac', 'mi',
'mmultiscripts', 'mn', 'mo', 'mover', 'mpadded', 'mphantom',
'mprescripts', 'mroot', 'mrow', 'mspace', 'msqrt', 'mstyle', 'msub',
'msubsup', 'msup', 'mtable', 'mtd', 'mtext', 'mtr', 'munder',
'munderover', 'none']
svg_elements = ['a', 'animate', 'animateColor', 'animateMotion',
'animateTransform', 'clipPath', 'circle', 'defs', 'desc', 'ellipse',
'font-face', 'font-face-name', 'font-face-src', 'g', 'glyph', 'hkern',
'linearGradient', 'line', 'marker', 'metadata', 'missing-glyph',
'mpath', 'path', 'polygon', 'polyline', 'radialGradient', 'rect',
'set', 'stop', 'svg', 'switch', 'text', 'title', 'tspan', 'use']
acceptable_attributes = ['abbr', 'accept', 'accept-charset', 'accesskey',
'action', 'align', 'alt', 'autocomplete', 'autofocus', 'axis',
'background', 'balance', 'bgcolor', 'bgproperties', 'border',
'bordercolor', 'bordercolordark', 'bordercolorlight', 'bottompadding',
'cellpadding', 'cellspacing', 'ch', 'challenge', 'char', 'charoff',
'choff', 'charset', 'checked', 'cite', 'class', 'clear', 'color',
'cols', 'colspan', 'compact', 'contenteditable', 'controls', 'coords',
'data', 'datafld', 'datapagesize', 'datasrc', 'datetime', 'default',
'delay', 'dir', 'disabled', 'draggable', 'dynsrc', 'enctype', 'end',
'face', 'for', 'form', 'frame', 'galleryimg', 'gutter', 'headers',
'height', 'hidefocus', 'hidden', 'high', 'href', 'hreflang', 'hspace',
'icon', 'id', 'inputmode', 'ismap', 'keytype', 'label', 'leftspacing',
'lang', 'list', 'longdesc', 'loop', 'loopcount', 'loopend',
'loopstart', 'low', 'lowsrc', 'max', 'maxlength', 'media', 'method',
'min', 'multiple', 'name', 'nohref', 'noshade', 'nowrap', 'open',
'optimum', 'pattern', 'ping', 'point-size', 'poster', 'pqg', 'preload',
'prompt', 'radiogroup', 'readonly', 'rel', 'repeat-max', 'repeat-min',
'replace', 'required', 'rev', 'rightspacing', 'rows', 'rowspan',
'rules', 'scope', 'selected', 'shape', 'size', 'span', 'src', 'start',
'step', 'style', 'summary', 'suppress', 'tabindex', 'target',
'template', 'title', 'toppadding', 'type', 'unselectable', 'usemap',
'urn', 'valign', 'value', 'variable', 'volume', 'vspace', 'vrml',
'width', 'wrap', 'xml:lang']
mathml_attributes = ['actiontype', 'align', 'columnalign', 'columnalign',
'columnalign', 'columnlines', 'columnspacing', 'columnspan', 'depth',
'display', 'displaystyle', 'equalcolumns', 'equalrows', 'fence',
'fontstyle', 'fontweight', 'frame', 'height', 'linethickness', 'lspace',
'mathbackground', 'mathcolor', 'mathvariant', 'mathvariant', 'maxsize',
'minsize', 'other', 'rowalign', 'rowalign', 'rowalign', 'rowlines',
'rowspacing', 'rowspan', 'rspace', 'scriptlevel', 'selection',
'separator', 'stretchy', 'width', 'width', 'xlink:href', 'xlink:show',
'xlink:type', 'xmlns', 'xmlns:xlink']
svg_attributes = ['accent-height', 'accumulate', 'additive', 'alphabetic',
'arabic-form', 'ascent', 'attributeName', 'attributeType',
'baseProfile', 'bbox', 'begin', 'by', 'calcMode', 'cap-height',
'class', 'clip-path', 'color', 'color-rendering', 'content', 'cx',
'cy', 'd', 'dx', 'dy', 'descent', 'display', 'dur', 'end', 'fill',
'fill-opacity', 'fill-rule', 'font-family', 'font-size',
'font-stretch', 'font-style', 'font-variant', 'font-weight', 'from',
'fx', 'fy', 'g1', 'g2', 'glyph-name', 'gradientUnits', 'hanging',
'height', 'horiz-adv-x', 'horiz-origin-x', 'id', 'ideographic', 'k',
'keyPoints', 'keySplines', 'keyTimes', 'lang', 'marker-end',
'marker-mid', 'marker-start', 'markerHeight', 'markerUnits',
'markerWidth', 'mathematical', 'max', 'min', 'name', 'offset',
'opacity', 'orient', 'origin', 'overline-position',
'overline-thickness', 'panose-1', 'path', 'pathLength', 'points',
'preserveAspectRatio', 'r', 'refX', 'refY', 'repeatCount',
'repeatDur', 'requiredExtensions', 'requiredFeatures', 'restart',
'rotate', 'rx', 'ry', 'slope', 'stemh', 'stemv', 'stop-color',
'stop-opacity', 'strikethrough-position', 'strikethrough-thickness',
'stroke', 'stroke-dasharray', 'stroke-dashoffset', 'stroke-linecap',
'stroke-linejoin', 'stroke-miterlimit', 'stroke-opacity',
'stroke-width', 'systemLanguage', 'target', 'text-anchor', 'to',
'transform', 'type', 'u1', 'u2', 'underline-position',
'underline-thickness', 'unicode', 'unicode-range', 'units-per-em',
'values', 'version', 'viewBox', 'visibility', 'width', 'widths', 'x',
'x-height', 'x1', 'x2', 'xlink:actuate', 'xlink:arcrole',
'xlink:href', 'xlink:role', 'xlink:show', 'xlink:title', 'xlink:type',
'xml:base', 'xml:lang', 'xml:space', 'xmlns', 'xmlns:xlink', 'y',
'y1', 'y2', 'zoomAndPan']
attr_val_is_uri = ['href', 'src', 'cite', 'action', 'longdesc', 'poster',
'xlink:href', 'xml:base']
svg_attr_val_allows_ref = ['clip-path', 'color-profile', 'cursor', 'fill',
'filter', 'marker', 'marker-start', 'marker-mid', 'marker-end',
'mask', 'stroke']
svg_allow_local_href = ['altGlyph', 'animate', 'animateColor',
'animateMotion', 'animateTransform', 'cursor', 'feImage', 'filter',
'linearGradient', 'pattern', 'radialGradient', 'textpath', 'tref',
'set', 'use']
acceptable_css_properties = ['azimuth', 'background-color',
'border-bottom-color', 'border-collapse', 'border-color',
'border-left-color', 'border-right-color', 'border-top-color', 'clear',
'color', 'cursor', 'direction', 'display', 'elevation', 'float', 'font',
'font-family', 'font-size', 'font-style', 'font-variant', 'font-weight',
'height', 'letter-spacing', 'line-height', 'overflow', 'pause',
'pause-after', 'pause-before', 'pitch', 'pitch-range', 'richness',
'speak', 'speak-header', 'speak-numeral', 'speak-punctuation',
'speech-rate', 'stress', 'text-align', 'text-decoration', 'text-indent',
'unicode-bidi', 'vertical-align', 'voice-family', 'volume',
'white-space', 'width']
acceptable_css_keywords = ['auto', 'aqua', 'black', 'block', 'blue',
'bold', 'both', 'bottom', 'brown', 'center', 'collapse', 'dashed',
'dotted', 'fuchsia', 'gray', 'green', '!important', 'italic', 'left',
'lime', 'maroon', 'medium', 'none', 'navy', 'normal', 'nowrap', 'olive',
'pointer', 'purple', 'red', 'right', 'solid', 'silver', 'teal', 'top',
'transparent', 'underline', 'white', 'yellow']
acceptable_svg_properties = ['fill', 'fill-opacity', 'fill-rule',
'stroke', 'stroke-width', 'stroke-linecap', 'stroke-linejoin',
'stroke-opacity']
acceptable_protocols = ['ed2k', 'ftp', 'http', 'https', 'irc',
'mailto', 'news', 'gopher', 'nntp', 'telnet', 'webcal',
'xmpp', 'callto', 'feed', 'urn', 'aim', 'rsync', 'tag',
'ssh', 'sftp', 'rtsp', 'afs']
# subclasses may define their own versions of these constants
allowed_elements = acceptable_elements + mathml_elements + svg_elements
allowed_attributes = acceptable_attributes + mathml_attributes + svg_attributes
allowed_css_properties = acceptable_css_properties
allowed_css_keywords = acceptable_css_keywords
allowed_svg_properties = acceptable_svg_properties
allowed_protocols = acceptable_protocols
# Sanitize the +html+, escaping all elements not in ALLOWED_ELEMENTS, and
# stripping out all # attributes not in ALLOWED_ATTRIBUTES. Style
# attributes are parsed, and a restricted set, # specified by
# ALLOWED_CSS_PROPERTIES and ALLOWED_CSS_KEYWORDS, are allowed through.
# attributes in ATTR_VAL_IS_URI are scanned, and only URI schemes specified
# in ALLOWED_PROTOCOLS are allowed.
#
# sanitize_html('<script> do_nasty_stuff() </script>')
#    => &lt;script> do_nasty_stuff() &lt;/script>
# sanitize_html('<a href="javascript: sucker();">Click here for $100</a>')
# => <a>Click here for $100</a>
def sanitize_token(self, token):
# accommodate filters which use token_type differently
token_type = token["type"]
if token_type in list(tokenTypes.keys()):
token_type = tokenTypes[token_type]
if token_type in (tokenTypes["StartTag"], tokenTypes["EndTag"],
tokenTypes["EmptyTag"]):
if token["name"] in self.allowed_elements:
return self.allowed_token(token, token_type)
else:
return self.disallowed_token(token, token_type)
elif token_type == tokenTypes["Comment"]:
pass
else:
return token
def allowed_token(self, token, token_type):
if "data" in token:
attrs = dict([(name, val) for name, val in
token["data"][::-1]
if name in self.allowed_attributes])
for attr in self.attr_val_is_uri:
if attr not in attrs:
continue
val_unescaped = re.sub("[`\000-\040\177-\240\s]+", '',
unescape(attrs[attr])).lower()
# remove replacement characters from unescaped characters
val_unescaped = val_unescaped.replace("\ufffd", "")
if (re.match("^[a-z0-9][-+.a-z0-9]*:", val_unescaped) and
(val_unescaped.split(':')[0] not in
self.allowed_protocols)):
del attrs[attr]
for attr in self.svg_attr_val_allows_ref:
if attr in attrs:
attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)',
' ',
unescape(attrs[attr]))
if (token["name"] in self.svg_allow_local_href and
'xlink:href' in attrs and re.search('^\s*[^#\s].*',
attrs['xlink:href'])):
del attrs['xlink:href']
if 'style' in attrs:
attrs['style'] = self.sanitize_css(attrs['style'])
token["data"] = [[name, val] for name, val in list(attrs.items())]
return token
def disallowed_token(self, token, token_type):
if token_type == tokenTypes["EndTag"]:
token["data"] = "</%s>" % token["name"]
elif token["data"]:
attrs = ''.join([' %s="%s"' % (k, escape(v)) for k, v in token["data"]])
token["data"] = "<%s%s>" % (token["name"], attrs)
else:
token["data"] = "<%s>" % token["name"]
if token.get("selfClosing"):
token["data"] = token["data"][:-1] + "/>"
if token["type"] in list(tokenTypes.keys()):
token["type"] = "Characters"
else:
token["type"] = tokenTypes["Characters"]
del token["name"]
return token
def sanitize_css(self, style):
# disallow urls
style = re.compile('url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)
# gauntlet
if not re.match("""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style):
return ''
if not re.match("^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style):
return ''
clean = []
for prop, value in re.findall("([-\w]+)\s*:\s*([^:;]*)", style):
if not value:
continue
if prop.lower() in self.allowed_css_properties:
clean.append(prop + ': ' + value + ';')
elif prop.split('-')[0].lower() in ['background', 'border', 'margin',
'padding']:
for keyword in value.split():
if not keyword in self.acceptable_css_keywords and \
not re.match("^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword):
break
else:
clean.append(prop + ': ' + value + ';')
elif prop.lower() in self.allowed_svg_properties:
clean.append(prop + ': ' + value + ';')
return ' '.join(clean)
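# Rough illustration of sanitize_css (hypothetical inputs, comment only):
# properties outside allowed_css_properties are dropped, url(...) values are
# blanked, and any style string failing the "gauntlet" regex is emptied
# entirely.
#
#   sanitize_css("color: red; behavior: url(evil.htc)")  ->  "color: red;"
#   sanitize_css("width: expression(alert(1))")          ->  ""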
class HTMLSanitizer(HTMLTokenizer, HTMLSanitizerMixin):
def __init__(self, stream, encoding=None, parseMeta=True, useChardet=True,
lowercaseElementName=False, lowercaseAttrName=False, parser=None):
# Change case matching defaults as we only output lowercase html anyway
# This solution doesn't seem ideal...
HTMLTokenizer.__init__(self, stream, encoding, parseMeta, useChardet,
lowercaseElementName, lowercaseAttrName, parser=parser)
def __iter__(self):
for token in HTMLTokenizer.__iter__(self):
token = self.sanitize_token(token)
if token:
yield token
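# Usage sketch: in the html5lib releases this vendored copy belongs to, the
# sanitizer is normally plugged into the parser as its tokenizer (how the
# caller wires it up is an assumption, not enforced by this module):
#
#   import html5lib
#   from html5lib import sanitizer
#   parser = html5lib.HTMLParser(tokenizer=sanitizer.HTMLSanitizer)
#   fragment = parser.parseFragment('<a href="javascript:alert(1)">hi</a>')
#   # disallowed protocols such as javascript: are stripped from href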
| bsd-3-clause |
vault/Sonata | sonata/tray.py | 6 | 5063 |
import gtk, gobject
class TrayIconTips(gtk.Window):
"""Custom tooltips derived from gtk.Window() that allow for markup text and multiple widgets, e.g. a progress bar. ;)"""
MARGIN = 4
def __init__(self):
gtk.Window.__init__(self, gtk.WINDOW_POPUP)
# from gtktooltips.c:gtk_tooltips_force_window
self.set_app_paintable(True)
self.set_resizable(False)
self.set_name("gtk-tooltips")
self.connect('expose-event', self._on__expose_event)
self._show_timeout_id = -1
self.timer_tag = None
self.notif_handler = None
self.use_notifications_location = False
self.notifications_location = 0
self.widget = None
def _calculate_pos(self, widget):
if widget is not None:
try:
x, y = widget.window.get_origin()
if widget.flags() & gtk.NO_WINDOW:
x += widget.allocation.x
y += widget.allocation.y
height = widget.allocation.height
except:
_icon_screen, icon_rect, _icon_orient = widget.get_geometry()
x = icon_rect[0]
y = icon_rect[1]
height = icon_rect[3]
w, h = self.size_request()
screen = self.get_screen()
pointer_screen, px, py, _ = screen.get_display().get_pointer()
if pointer_screen != screen:
px = x
py = y
try:
# Use the monitor that the systemtray icon is on
monitor_num = screen.get_monitor_at_point(x, y)
except:
# No systemtray icon, use the monitor that the pointer is on
monitor_num = screen.get_monitor_at_point(px, py)
monitor = screen.get_monitor_geometry(monitor_num)
try:
# If the tooltip goes off the screen horizontally, realign it so that
# it all displays.
if (x + w) > monitor.x + monitor.width:
x = monitor.x + monitor.width - w
# If the tooltip goes off the screen vertically (i.e. the system tray
# icon is on the bottom of the screen), realign the icon so that it
# shows above the icon.
if ((y + h + height + self.MARGIN) >
monitor.y + monitor.height):
y = y - h - self.MARGIN
else:
y = y + height + self.MARGIN
except:
pass
if not self.use_notifications_location:
try:
return x, y
except:
#Fallback to top-left:
return monitor.x, monitor.y
elif self.notifications_location == 0:
try:
return x, y
except:
#Fallback to top-left:
return monitor.x, monitor.y
elif self.notifications_location == 1:
return monitor.x, monitor.y
elif self.notifications_location == 2:
return monitor.x + monitor.width - w, monitor.y
elif self.notifications_location == 3:
return monitor.x, monitor.y + monitor.height - h
elif self.notifications_location == 4:
return monitor.x + monitor.width - w, monitor.y + monitor.height - h
elif self.notifications_location == 5:
return monitor.x + (monitor.width - w)/2, monitor.y + (monitor.height - h)/2
def _event_handler (self, widget):
widget.connect_after("event-after", self._motion_cb)
def _motion_cb (self, widget, event):
if self.notif_handler != None:
return
if event.type == gtk.gdk.LEAVE_NOTIFY:
self._remove_timer()
if event.type == gtk.gdk.ENTER_NOTIFY:
self._start_delay(widget)
def _start_delay (self, widget):
self.timer_tag = gobject.timeout_add(500, self._tips_timeout, widget)
def _tips_timeout (self, widget):
self.use_notifications_location = False
self._real_display(widget)
def _remove_timer(self):
self.hide()
if self.timer_tag:
gobject.source_remove(self.timer_tag)
self.timer_tag = None
# from gtktooltips.c:gtk_tooltips_paint_window
def _on__expose_event(self, window, _event):
w, h = window.size_request()
window.style.paint_flat_box(window.window,
gtk.STATE_NORMAL, gtk.SHADOW_OUT,
None, window, "tooltip",
0, 0, w, h)
return False
def _real_display(self, widget):
x, y = self._calculate_pos(widget)
self.move(x, y)
self.show()
# Public API
def hide(self):
gtk.Window.hide(self)
gobject.source_remove(self._show_timeout_id)
self._show_timeout_id = -1
self.notif_handler = None
def set_tip (self, widget):
self.widget = widget
self._event_handler (self.widget)
def add_widget (self, widget_to_add):
self.add(widget_to_add)
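# Usage sketch (PyGTK 2 era; status_icon and the label text are placeholders):
#
#   tips = TrayIconTips()
#   tips.set_tip(status_icon)     # tooltip shows ~500 ms after pointer enters
#   box = gtk.VBox()
#   box.pack_start(gtk.Label("Now playing: ..."))
#   tips.add_widget(box)
#   box.show_all()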
| gpl-3.0 |
IPVL/swift-kilo | swift/account/backend.py | 11 | 23433 | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Pluggable Back-end for Account Server
"""
from uuid import uuid4
import time
import cPickle as pickle
import sqlite3
from swift.common.utils import Timestamp
from swift.common.db import DatabaseBroker, utf8encode
DATADIR = 'accounts'
POLICY_STAT_TRIGGER_SCRIPT = """
CREATE TRIGGER container_insert_ps AFTER INSERT ON container
BEGIN
INSERT OR IGNORE INTO policy_stat
(storage_policy_index, container_count, object_count, bytes_used)
VALUES (new.storage_policy_index, 0, 0, 0);
UPDATE policy_stat
SET container_count = container_count + (1 - new.deleted),
object_count = object_count + new.object_count,
bytes_used = bytes_used + new.bytes_used
WHERE storage_policy_index = new.storage_policy_index;
END;
CREATE TRIGGER container_delete_ps AFTER DELETE ON container
BEGIN
UPDATE policy_stat
SET container_count = container_count - (1 - old.deleted),
object_count = object_count - old.object_count,
bytes_used = bytes_used - old.bytes_used
WHERE storage_policy_index = old.storage_policy_index;
END;
"""
class AccountBroker(DatabaseBroker):
"""Encapsulates working with an account database."""
db_type = 'account'
db_contains_type = 'container'
db_reclaim_timestamp = 'delete_timestamp'
def _initialize(self, conn, put_timestamp, **kwargs):
"""
Create a brand new account database (tables, indices, triggers, etc.)
:param conn: DB connection object
:param put_timestamp: put timestamp
"""
if not self.account:
raise ValueError(
'Attempting to create a new database with no account set')
self.create_container_table(conn)
self.create_account_stat_table(conn, put_timestamp)
self.create_policy_stat_table(conn)
def create_container_table(self, conn):
"""
Create container table which is specific to the account DB.
:param conn: DB connection object
"""
conn.executescript("""
CREATE TABLE container (
ROWID INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
put_timestamp TEXT,
delete_timestamp TEXT,
object_count INTEGER,
bytes_used INTEGER,
deleted INTEGER DEFAULT 0,
storage_policy_index INTEGER DEFAULT 0
);
CREATE INDEX ix_container_deleted_name ON
container (deleted, name);
CREATE TRIGGER container_insert AFTER INSERT ON container
BEGIN
UPDATE account_stat
SET container_count = container_count + (1 - new.deleted),
object_count = object_count + new.object_count,
bytes_used = bytes_used + new.bytes_used,
hash = chexor(hash, new.name,
new.put_timestamp || '-' ||
new.delete_timestamp || '-' ||
new.object_count || '-' || new.bytes_used);
END;
CREATE TRIGGER container_update BEFORE UPDATE ON container
BEGIN
SELECT RAISE(FAIL, 'UPDATE not allowed; DELETE and INSERT');
END;
CREATE TRIGGER container_delete AFTER DELETE ON container
BEGIN
UPDATE account_stat
SET container_count = container_count - (1 - old.deleted),
object_count = object_count - old.object_count,
bytes_used = bytes_used - old.bytes_used,
hash = chexor(hash, old.name,
old.put_timestamp || '-' ||
old.delete_timestamp || '-' ||
old.object_count || '-' || old.bytes_used);
END;
""" + POLICY_STAT_TRIGGER_SCRIPT)
def create_account_stat_table(self, conn, put_timestamp):
"""
Create account_stat table which is specific to the account DB.
Not a part of Pluggable Back-ends, internal to the baseline code.
:param conn: DB connection object
:param put_timestamp: put timestamp
"""
conn.executescript("""
CREATE TABLE account_stat (
account TEXT,
created_at TEXT,
put_timestamp TEXT DEFAULT '0',
delete_timestamp TEXT DEFAULT '0',
container_count INTEGER,
object_count INTEGER DEFAULT 0,
bytes_used INTEGER DEFAULT 0,
hash TEXT default '00000000000000000000000000000000',
id TEXT,
status TEXT DEFAULT '',
status_changed_at TEXT DEFAULT '0',
metadata TEXT DEFAULT ''
);
INSERT INTO account_stat (container_count) VALUES (0);
""")
conn.execute('''
UPDATE account_stat SET account = ?, created_at = ?, id = ?,
put_timestamp = ?, status_changed_at = ?
''', (self.account, Timestamp(time.time()).internal, str(uuid4()),
put_timestamp, put_timestamp))
def create_policy_stat_table(self, conn):
"""
Create policy_stat table which is specific to the account DB.
Not a part of Pluggable Back-ends, internal to the baseline code.
:param conn: DB connection object
"""
conn.executescript("""
CREATE TABLE policy_stat (
storage_policy_index INTEGER PRIMARY KEY,
container_count INTEGER DEFAULT 0,
object_count INTEGER DEFAULT 0,
bytes_used INTEGER DEFAULT 0
);
INSERT OR IGNORE INTO policy_stat (
storage_policy_index, container_count, object_count,
bytes_used
)
SELECT 0, container_count, object_count, bytes_used
FROM account_stat
WHERE container_count > 0;
""")
def get_db_version(self, conn):
if self._db_version == -1:
self._db_version = 0
for row in conn.execute('''
SELECT name FROM sqlite_master
WHERE name = 'ix_container_deleted_name' '''):
self._db_version = 1
return self._db_version
def _delete_db(self, conn, timestamp, force=False):
"""
Mark the DB as deleted.
:param conn: DB connection object
:param timestamp: timestamp to mark as deleted
"""
conn.execute("""
UPDATE account_stat
SET delete_timestamp = ?,
status = 'DELETED',
status_changed_at = ?
WHERE delete_timestamp < ? """, (timestamp, timestamp, timestamp))
def _commit_puts_load(self, item_list, entry):
"""See :func:`swift.common.db.DatabaseBroker._commit_puts_load`"""
loaded = pickle.loads(entry.decode('base64'))
# check to see if the update includes policy_index or not
(name, put_timestamp, delete_timestamp, object_count, bytes_used,
deleted) = loaded[:6]
if len(loaded) > 6:
storage_policy_index = loaded[6]
else:
# legacy support during upgrade until first non legacy storage
# policy is defined
storage_policy_index = 0
item_list.append(
{'name': name,
'put_timestamp': put_timestamp,
'delete_timestamp': delete_timestamp,
'object_count': object_count,
'bytes_used': bytes_used,
'deleted': deleted,
'storage_policy_index': storage_policy_index})
def empty(self):
"""
Check if the account DB is empty.
:returns: True if the database has no active containers.
"""
self._commit_puts_stale_ok()
with self.get() as conn:
row = conn.execute(
'SELECT container_count from account_stat').fetchone()
return (row[0] == 0)
def make_tuple_for_pickle(self, record):
return (record['name'], record['put_timestamp'],
record['delete_timestamp'], record['object_count'],
record['bytes_used'], record['deleted'],
record['storage_policy_index'])
def put_container(self, name, put_timestamp, delete_timestamp,
object_count, bytes_used, storage_policy_index):
"""
Create a container with the given attributes.
:param name: name of the container to create
:param put_timestamp: put_timestamp of the container to create
:param delete_timestamp: delete_timestamp of the container to create
:param object_count: number of objects in the container
:param bytes_used: number of bytes used by the container
:param storage_policy_index: the storage policy for this container
"""
if delete_timestamp > put_timestamp and \
object_count in (None, '', 0, '0'):
deleted = 1
else:
deleted = 0
record = {'name': name, 'put_timestamp': put_timestamp,
'delete_timestamp': delete_timestamp,
'object_count': object_count,
'bytes_used': bytes_used,
'deleted': deleted,
'storage_policy_index': storage_policy_index}
self.put_record(record)
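# Example (sketch, illustrative values): queue a container record for this
# account DB; it is merged into the container table on the next commit.
#
#   broker = AccountBroker(db_file, account='AUTH_test')   # db_file: placeholder path
#   now = Timestamp(time.time()).internal
#   broker.put_container('photos', now, '0', object_count=0, bytes_used=0,
#                        storage_policy_index=0)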
def _is_deleted_info(self, status, container_count, delete_timestamp,
put_timestamp):
"""
Apply delete logic to database info.
:returns: True if the DB is considered to be deleted, False otherwise
"""
return status == 'DELETED' or (
container_count in (None, '', 0, '0') and
Timestamp(delete_timestamp) > Timestamp(put_timestamp))
def _is_deleted(self, conn):
"""
Check account_stat table and evaluate info.
:param conn: database conn
:returns: True if the DB is considered to be deleted, False otherwise
"""
info = conn.execute('''
SELECT put_timestamp, delete_timestamp, container_count, status
FROM account_stat''').fetchone()
return self._is_deleted_info(**info)
def is_status_deleted(self):
"""Only returns true if the status field is set to DELETED."""
with self.get() as conn:
row = conn.execute('''
SELECT put_timestamp, delete_timestamp, status
FROM account_stat''').fetchone()
return row['status'] == "DELETED" or (
row['delete_timestamp'] > row['put_timestamp'])
def get_policy_stats(self, do_migrations=False):
"""
Get global policy stats for the account.
:param do_migrations: boolean, if True the policy stat dicts will
always include the 'container_count' key;
otherwise it may be omitted on legacy databases
until they are migrated.
:returns: dict of policy stats where the key is the policy index and
the value is a dictionary like {'object_count': M,
'bytes_used': N, 'container_count': L}
"""
columns = [
'storage_policy_index',
'container_count',
'object_count',
'bytes_used',
]
def run_query():
return (conn.execute('''
SELECT %s
FROM policy_stat
''' % ', '.join(columns)).fetchall())
self._commit_puts_stale_ok()
info = []
with self.get() as conn:
try:
info = run_query()
except sqlite3.OperationalError as err:
if "no such column: container_count" in str(err):
if do_migrations:
self._migrate_add_container_count(conn)
else:
columns.remove('container_count')
info = run_query()
elif "no such table: policy_stat" not in str(err):
raise
policy_stats = {}
for row in info:
stats = dict(row)
key = stats.pop('storage_policy_index')
policy_stats[key] = stats
return policy_stats
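# Example of the returned mapping (numbers are illustrative):
#
#   broker.get_policy_stats()
#   -> {0: {'container_count': 2, 'object_count': 10, 'bytes_used': 2048}}
#
# On a legacy database 'container_count' may be missing from the stats dicts
# unless do_migrations=True is passed.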
def get_info(self):
"""
Get global data for the account.
:returns: dict with keys: account, created_at, put_timestamp,
delete_timestamp, status_changed_at, container_count,
object_count, bytes_used, hash, id
"""
self._commit_puts_stale_ok()
with self.get() as conn:
return dict(conn.execute('''
SELECT account, created_at, put_timestamp, delete_timestamp,
status_changed_at, container_count, object_count,
bytes_used, hash, id
FROM account_stat
''').fetchone())
def list_containers_iter(self, limit, marker, end_marker, prefix,
delimiter):
"""
Get a list of containers sorted by name starting at marker onward, up
to limit entries. Entries will begin with the prefix and will not have
the delimiter after the prefix.
:param limit: maximum number of entries to get
:param marker: marker query
:param end_marker: end marker query
:param prefix: prefix query
:param delimiter: delimiter for query
:returns: list of tuples of (name, object_count, bytes_used, 0)
"""
(marker, end_marker, prefix, delimiter) = utf8encode(
marker, end_marker, prefix, delimiter)
self._commit_puts_stale_ok()
if delimiter and not prefix:
prefix = ''
orig_marker = marker
with self.get() as conn:
results = []
while len(results) < limit:
query = """
SELECT name, object_count, bytes_used, 0
FROM container
WHERE deleted = 0 AND """
query_args = []
if end_marker:
query += ' name < ? AND'
query_args.append(end_marker)
if marker and marker >= prefix:
query += ' name > ? AND'
query_args.append(marker)
elif prefix:
query += ' name >= ? AND'
query_args.append(prefix)
if self.get_db_version(conn) < 1:
query += ' +deleted = 0'
else:
query += ' deleted = 0'
query += ' ORDER BY name LIMIT ?'
query_args.append(limit - len(results))
curs = conn.execute(query, query_args)
curs.row_factory = None
if prefix is None:
# A delimiter without a specified prefix is ignored
return [r for r in curs]
if not delimiter:
if not prefix:
# It is possible to have a delimiter but no prefix
# specified. As above, the prefix will be set to the
# empty string, so avoid performing the extra work to
# check against an empty prefix.
return [r for r in curs]
else:
return [r for r in curs if r[0].startswith(prefix)]
# We have a delimiter and a prefix (possibly empty string) to
# handle
rowcount = 0
for row in curs:
rowcount += 1
marker = name = row[0]
if len(results) >= limit or not name.startswith(prefix):
curs.close()
return results
end = name.find(delimiter, len(prefix))
if end > 0:
marker = name[:end] + chr(ord(delimiter) + 1)
dir_name = name[:end + 1]
if dir_name != orig_marker:
results.append([dir_name, 0, 0, 1])
curs.close()
break
results.append(row)
if not rowcount:
break
return results
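# Example (sketch): list up to 100 containers that start with a prefix,
# rolling up "directory" style entries on '/'; the last tuple element is 1
# for such rolled-up entries and 0 for real containers.
#
#   rows = broker.list_containers_iter(100, marker='', end_marker='',
#                                      prefix='img_', delimiter='/')
#   for name, object_count, bytes_used, is_subdir in rows:
#       ...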
def merge_items(self, item_list, source=None):
"""
Merge items into the container table.
:param item_list: list of dictionaries of {'name', 'put_timestamp',
'delete_timestamp', 'object_count', 'bytes_used',
'deleted', 'storage_policy_index'}
:param source: if defined, update incoming_sync with the source
"""
def _really_merge_items(conn):
max_rowid = -1
curs = conn.cursor()
for rec in item_list:
record = [rec['name'], rec['put_timestamp'],
rec['delete_timestamp'], rec['object_count'],
rec['bytes_used'], rec['deleted'],
rec['storage_policy_index']]
query = '''
SELECT name, put_timestamp, delete_timestamp,
object_count, bytes_used, deleted,
storage_policy_index
FROM container WHERE name = ?
'''
if self.get_db_version(conn) >= 1:
query += ' AND deleted IN (0, 1)'
curs_row = curs.execute(query, (rec['name'],))
curs_row.row_factory = None
row = curs_row.fetchone()
if row:
row = list(row)
for i in xrange(5):
if record[i] is None and row[i] is not None:
record[i] = row[i]
if row[1] > record[1]: # Keep newest put_timestamp
record[1] = row[1]
if row[2] > record[2]: # Keep newest delete_timestamp
record[2] = row[2]
# If deleted, mark as such
if record[2] > record[1] and \
record[3] in (None, '', 0, '0'):
record[5] = 1
else:
record[5] = 0
curs.execute('''
DELETE FROM container WHERE name = ? AND
deleted IN (0, 1)
''', (record[0],))
curs.execute('''
INSERT INTO container (name, put_timestamp,
delete_timestamp, object_count, bytes_used,
deleted, storage_policy_index)
VALUES (?, ?, ?, ?, ?, ?, ?)
''', record)
if source:
max_rowid = max(max_rowid, rec['ROWID'])
if source:
try:
curs.execute('''
INSERT INTO incoming_sync (sync_point, remote_id)
VALUES (?, ?)
''', (max_rowid, source))
except sqlite3.IntegrityError:
curs.execute('''
UPDATE incoming_sync
SET sync_point=max(?, sync_point)
WHERE remote_id=?
''', (max_rowid, source))
conn.commit()
with self.get() as conn:
# create the policy stat table if needed and add spi to container
try:
_really_merge_items(conn)
except sqlite3.OperationalError as err:
if 'no such column: storage_policy_index' not in str(err):
raise
self._migrate_add_storage_policy_index(conn)
_really_merge_items(conn)
def _migrate_add_container_count(self, conn):
"""
Add the container_count column to the 'policy_stat' table and
update it
:param conn: DB connection object
"""
# add the container_count column
curs = conn.cursor()
curs.executescript('''
DROP TRIGGER container_delete_ps;
DROP TRIGGER container_insert_ps;
ALTER TABLE policy_stat
ADD COLUMN container_count INTEGER DEFAULT 0;
''' + POLICY_STAT_TRIGGER_SCRIPT)
# keep the simple case simple, if there's only one entry in the
# policy_stat table we just copy the total container count from the
# account_stat table
# if that triggers an update then the where changes <> 0 *would* exist
# and the insert or replace from the count subqueries won't execute
curs.executescript("""
UPDATE policy_stat
SET container_count = (
SELECT container_count
FROM account_stat)
WHERE (
SELECT COUNT(storage_policy_index)
FROM policy_stat
) <= 1;
INSERT OR REPLACE INTO policy_stat (
storage_policy_index,
container_count,
object_count,
bytes_used
)
SELECT p.storage_policy_index,
c.count,
p.object_count,
p.bytes_used
FROM (
SELECT storage_policy_index,
COUNT(*) as count
FROM container
WHERE deleted = 0
GROUP BY storage_policy_index
) c
JOIN policy_stat p
ON p.storage_policy_index = c.storage_policy_index
WHERE NOT EXISTS(
SELECT changes() as change
FROM policy_stat
WHERE change <> 0
);
""")
conn.commit()
def _migrate_add_storage_policy_index(self, conn):
"""
Add the storage_policy_index column to the 'container' table and
set up triggers, creating the policy_stat table if needed.
:param conn: DB connection object
"""
try:
self.create_policy_stat_table(conn)
except sqlite3.OperationalError as err:
if 'table policy_stat already exists' not in str(err):
raise
conn.executescript('''
ALTER TABLE container
ADD COLUMN storage_policy_index INTEGER DEFAULT 0;
''' + POLICY_STAT_TRIGGER_SCRIPT)
| apache-2.0 |
MattsFleaMarket/python-for-android | python-build/python-libs/gdata/tests/gdata_tests/calendar_test.py | 87 | 38211 | #!/usr/bin/python
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = '[email protected] (Jeff Scudder)'
import unittest
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom
import gdata
from gdata import test_data
import gdata.calendar
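# The tests below exercise canned XML from gdata.test_data; a minimal
# stand-alone parse looks like this (sketch, Python 2 syntax as in the rest
# of this module):
#
#   feed = gdata.calendar.CalendarListFeedFromString(test_data.CALENDAR_FEED)
#   print feed.entry[0].title.text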
class CalendarFeedTest(unittest.TestCase):
def setUp(self):
self.calendar_feed = gdata.calendar.CalendarListFeedFromString(
test_data.CALENDAR_FEED)
def testEntryCount(self):
# Assert the number of items in the feed of calendars
self.assertEquals(len(self.calendar_feed.entry),2)
def testToAndFromString(self):
# Assert the appropriate type for each entry
for an_entry in self.calendar_feed.entry:
self.assert_(isinstance(an_entry, gdata.calendar.CalendarListEntry),
'Entry must be an instance of CalendarListEntry')
# Regenerate feed from xml text
new_calendar_feed = (
gdata.calendar.CalendarListFeedFromString(str(self.calendar_feed)))
for an_entry in new_calendar_feed.entry:
self.assert_(isinstance(an_entry, gdata.calendar.CalendarListEntry),
'Entry in regenerated feed must be an instance of CalendarListEntry')
def testAuthor(self):
"""Tests the existence of a <atom:author> and verifies the name and email"""
# Assert that each element in the feed author list is an atom.Author
for an_author in self.calendar_feed.author:
self.assert_(isinstance(an_author, atom.Author),
"Calendar feed <atom:author> element must be an instance of " +
"atom.Author: %s" % an_author)
# Assert the feed author name is as expected
self.assertEquals(self.calendar_feed.author[0].name.text, 'GData Ops Demo')
# Assert the feed author name is as expected
self.assertEquals(self.calendar_feed.author[0].email.text,
'[email protected]')
# Assert one of the values for an entry author
self.assertEquals(self.calendar_feed.entry[0].author[0].name.text,
'GData Ops Demo')
self.assertEquals(self.calendar_feed.entry[0].author[0].email.text,
'[email protected]')
def testId(self):
"""Tests the existence of a <atom:id> in the feed and entries
and verifies the value"""
# Assert the feed id exists and is an atom.Id
self.assert_(isinstance(self.calendar_feed.id, atom.Id),
"Calendar feed <atom:id> element must be an instance of atom.Id: %s" % (
self.calendar_feed.id))
# Assert the feed id value is as expected
self.assertEquals(self.calendar_feed.id.text,
'http://www.google.com/calendar/feeds/default')
# Assert that each entry has an id which is an atom.Id
for an_entry in self.calendar_feed.entry:
self.assert_(isinstance(an_entry.id, atom.Id),
"Calendar entry <atom:id> element must be an instance of " +
"atom.Id: %s" % an_entry.id)
# Assert one of the values for an id
self.assertEquals(self.calendar_feed.entry[1].id.text,
'http://www.google.com/calendar/feeds/default/' +
'jnh21ovnjgfph21h32gvms2758%40group.calendar.google.com')
def testPublished(self):
"""Tests the existence of a <atom:published> in the entries
and verifies the value"""
# Assert that each entry has a published value which is an atom.Published
for an_entry in self.calendar_feed.entry:
self.assert_(isinstance(an_entry.published, atom.Published),
"Calendar entry <atom:published> element must be an instance of " +
"atom.Published: %s" % an_entry.published)
# Assert one of the values for published is as expected
self.assertEquals(self.calendar_feed.entry[1].published.text,
'2007-03-20T22:48:57.837Z')
def testUpdated(self):
"""Tests the existence of a <atom:updated> in the feed and the entries
and verifies the value"""
# Assert that the feed updated element exists and is an atom.Updated
self.assert_(isinstance(self.calendar_feed.updated, atom.Updated),
"Calendar feed <atom:updated> element must be an instance of " +
"atom.Updated: %s" % self.calendar_feed.updated)
# Assert that each entry has a updated value which is an atom.Updated
for an_entry in self.calendar_feed.entry:
self.assert_(isinstance(an_entry.updated, atom.Updated),
"Calendar entry <atom:updated> element must be an instance of" +
"atom.Updated: %s" % an_entry.updated)
# Assert the feed updated value is as expected
self.assertEquals(self.calendar_feed.updated.text,
'2007-03-20T22:48:57.833Z')
# Assert one of the values for updated
self.assertEquals(self.calendar_feed.entry[0].updated.text,
'2007-03-20T22:48:52.000Z')
def testTitle(self):
"""Tests the existence of a <atom:title> in the feed and the entries and
verifies the value"""
# Assert that the feed title element exists and is an atom.Title
self.assert_(isinstance(self.calendar_feed.title, atom.Title),
"Calendar feed <atom:title> element must be an instance of " +
"atom.Title: %s" % self.calendar_feed.title)
# Assert that each entry has a title value which is an atom.Title
for an_entry in self.calendar_feed.entry:
self.assert_(isinstance(an_entry.title, atom.Title),
"Calendar entry <atom:title> element must be an instance of " +
"atom.Title: %s" % an_entry.title)
# Assert the feed title value is as expected
self.assertEquals(self.calendar_feed.title.text,
'GData Ops Demo\'s Calendar List')
# Assert one of the values for title
self.assertEquals(self.calendar_feed.entry[0].title.text, 'GData Ops Demo')
def testColor(self):
"""Tests the existence of a <gCal:color> and verifies the value"""
# Assert the color is present and is a gdata.calendar.Color
for an_entry in self.calendar_feed.entry:
self.assert_(isinstance(an_entry.color, gdata.calendar.Color),
"Calendar feed <gCal:color> element must be an instance of " +
"gdata.calendar.Color: %s" % an_entry.color)
# Assert the color value is as expected
self.assertEquals(self.calendar_feed.entry[0].color.value, '#2952A3')
def testAccessLevel(self):
"""Tests the existence of a <gCal:accesslevel> element and verifies the
value"""
# Assert the access_level is present and is a gdata.calendar.AccessLevel
for an_entry in self.calendar_feed.entry:
self.assert_(isinstance(an_entry.access_level, gdata.calendar.AccessLevel),
"Calendar feed <gCal:accesslevel> element must be an instance of " +
"gdata.calendar.AccessLevel: %s" % an_entry.access_level)
# Assert the access_level value is as expected
self.assertEquals(self.calendar_feed.entry[0].access_level.value, 'owner')
def testTimezone(self):
"""Tests the existence of a <gCal:timezone> element and verifies the
value"""
# Assert the timezone is present and is a gdata.calendar.Timezone
for an_entry in self.calendar_feed.entry:
self.assert_(isinstance(an_entry.timezone, gdata.calendar.Timezone),
"Calendar feed <gCal:timezone> element must be an instance of " +
"gdata.calendar.Timezone: %s" % an_entry.timezone)
# Assert the timezone value is as expected
self.assertEquals(self.calendar_feed.entry[0].timezone.value,
'America/Los_Angeles')
def testHidden(self):
"""Tests the existence of a <gCal:hidden> element and verifies the
value"""
# Assert the hidden is present and is a gdata.calendar.Hidden
for an_entry in self.calendar_feed.entry:
self.assert_(isinstance(an_entry.hidden, gdata.calendar.Hidden),
"Calendar feed <gCal:hidden> element must be an instance of " +
"gdata.calendar.Hidden: %s" % an_entry.hidden)
# Assert the hidden value is as expected
self.assertEquals(self.calendar_feed.entry[0].hidden.value, 'false')
def testOpenSearch(self):
"""Tests the existence of <openSearch:startIndex>"""
# Assert that the elements exist and are the appropriate type
self.assert_(isinstance(self.calendar_feed.start_index, gdata.StartIndex),
"Calendar feed <openSearch:startIndex> element must be an " +
"instance of gdata.StartIndex: %s" % self.calendar_feed.start_index)
# Assert the values for each openSearch element are as expected
self.assertEquals(self.calendar_feed.start_index.text, '1')
def testGenerator(self):
"""Tests the existence of <atom:generator> and verifies the value"""
# Assert that the element exists and is of the appropriate type
self.assert_(isinstance(self.calendar_feed.generator, atom.Generator),
"Calendar feed <atom:generator> element must be an instance of " +
"atom.Generator: %s" % self.calendar_feed.generator)
# Assert the generator version, uri and text are as expected
self.assertEquals(self.calendar_feed.generator.text, 'Google Calendar')
self.assertEquals(self.calendar_feed.generator.version, '1.0')
self.assertEquals(self.calendar_feed.generator.uri,
'http://www.google.com/calendar')
def testEntryLink(self):
"""Makes sure entry links in the private composite feed are parsed."""
entry = gdata.calendar.CalendarEventEntryFromString(
test_data.RECURRENCE_EXCEPTION_ENTRY)
self.assert_(isinstance(entry.recurrence_exception, list))
self.assert_(isinstance(entry.recurrence_exception[0].entry_link,
gdata.EntryLink))
self.assert_(isinstance(entry.recurrence_exception[0].entry_link.entry,
gdata.calendar.CalendarEventEntry))
self.assertEquals(
entry.recurrence_exception[0].entry_link.entry.author[0].name.text,
'gdata ops')
def testSequence(self):
entry = gdata.calendar.CalendarEventEntry(
sequence=gdata.calendar.Sequence(value='1'))
entry2 = gdata.calendar.CalendarEventEntryFromString(str(entry))
self.assertEqual(entry.sequence.value, entry2.sequence.value)
entry = gdata.calendar.CalendarEventEntryFromString(
'<entry xmlns="%s"><sequence xmlns="%s" value="7" /></entry>' % (
atom.ATOM_NAMESPACE, gdata.calendar.GCAL_NAMESPACE))
self.assertEqual(entry.sequence.value, '7')
def testOriginalEntry(self):
"""Make sure original entry in the private composite feed are parsed."""
entry = gdata.calendar.CalendarEventEntryFromString(
test_data.RECURRENCE_EXCEPTION_ENTRY)
self.assertEquals(
entry.recurrence_exception[0].entry_link.entry.original_event.id,
'i7lgfj69mjqjgnodklif3vbm7g')
class CalendarFeedTestRegenerated(CalendarFeedTest):
def setUp(self):
old_calendar_feed = (
gdata.calendar.CalendarListFeedFromString(test_data.CALENDAR_FEED))
self.calendar_feed = (
gdata.calendar.CalendarListFeedFromString(str(old_calendar_feed)))
tree = ElementTree.fromstring(str(old_calendar_feed))
class CalendarEventFeedTest(unittest.TestCase):
def setUp(self):
self.calendar_event_feed = (
gdata.calendar.CalendarEventFeedFromString(
test_data.CALENDAR_FULL_EVENT_FEED))
def testEntryCount(self):
# Assert the number of items in the feed of events
self.assertEquals(len(self.calendar_event_feed.entry),11)
def testToAndFromString(self):
# Assert the appropriate type for each entry
for an_entry in self.calendar_event_feed.entry:
self.assert_(isinstance(an_entry, gdata.calendar.CalendarEventEntry),
"Entry must be an instance of a CalendarEventEntry")
# Regenerate feed from xml text
new_calendar_event_feed = gdata.calendar.CalendarEventFeedFromString(
str(self.calendar_event_feed))
for an_entry in new_calendar_event_feed.entry:
self.assert_(isinstance(an_entry, gdata.calendar.CalendarEventEntry),
"Entry in regenerated feed must be an instance of CalendarEventEntry")
def testAuthor(self):
"""Tests the existence of a <atom:author> and verifies the name and email"""
# Assert that each element in the feed author list is an atom.Author
for an_author in self.calendar_event_feed.author:
self.assert_(isinstance(an_author, atom.Author),
"Calendar event feed <atom:author> element must be an instance of " +
"atom.Author: %s" % an_author)
# Assert the feed author name is as expected
self.assertEquals(self.calendar_event_feed.author[0].name.text,
'GData Ops Demo')
# Assert the feed author name is as expected
self.assertEquals(self.calendar_event_feed.author[0].email.text,
'[email protected]')
# Assert one of the values for an entry author
self.assertEquals(self.calendar_event_feed.entry[0].author[0].name.text,
'GData Ops Demo')
self.assertEquals(self.calendar_event_feed.entry[0].author[0].email.text,
'[email protected]')
def testId(self):
"""Tests the existence of a <atom:id> in the feed and entries and
verifies the value"""
# Assert the feed id exists and is an atom.Id
self.assert_(isinstance(self.calendar_event_feed.id, atom.Id),
"Calendar event feed <atom:id> element must be an instance of " +
"atom.Id: %s" % self.calendar_event_feed.id)
# Assert the feed id value is as expected
self.assertEquals(self.calendar_event_feed.id.text,
'http://www.google.com/calendar/feeds/default/private/full')
# Assert that each entry has an id which is an atom.Id
for an_entry in self.calendar_event_feed.entry:
self.assert_(isinstance(an_entry.id, atom.Id),
"Calendar event entry <atom:id> element must be an " +
"instance of atom.Id: %s" % an_entry.id)
# Assert one of the values for an id
self.assertEquals(self.calendar_event_feed.entry[1].id.text,
'http://www.google.com/calendar/feeds/default/private/full/' +
'2qt3ao5hbaq7m9igr5ak9esjo0')
def testPublished(self):
"""Tests the existence of a <atom:published> in the entries and
verifies the value"""
# Assert that each entry has a published value which is an atom.Published
for an_entry in self.calendar_event_feed.entry:
self.assert_(isinstance(an_entry.published, atom.Published),
"Calendar event entry <atom:published> element must be an instance " +
"of atom.Published: %s" % an_entry.published)
# Assert one of the values for published is as expected
self.assertEquals(self.calendar_event_feed.entry[1].published.text,
'2007-03-20T21:26:04.000Z')
def testUpdated(self):
"""Tests the existence of a <atom:updated> in the feed and the entries and
verifies the value"""
# Assert that the feed updated element exists and is an atom.Updated
self.assert_(isinstance(self.calendar_event_feed.updated, atom.Updated),
"Calendar feed <atom:updated> element must be an instance of " +
"atom.Updated: %s" % self.calendar_event_feed.updated)
# Assert that each entry has a updated value which is an atom.Updated
for an_entry in self.calendar_event_feed.entry:
self.assert_(isinstance(an_entry.updated, atom.Updated),
"Calendar event entry <atom:updated> element must be an instance " +
"of atom.Updated: %s" % an_entry.updated)
# Assert the feed updated value is as expected
self.assertEquals(self.calendar_event_feed.updated.text,
'2007-03-20T21:29:57.000Z')
# Assert one of the values for updated
self.assertEquals(self.calendar_event_feed.entry[3].updated.text,
'2007-03-20T21:25:46.000Z')
def testTitle(self):
"""Tests the existence of a <atom:title> in the feed and the entries
and verifies the value"""
# Assert that the feed title element exists and is an atom.Title
self.assert_(isinstance(self.calendar_event_feed.title, atom.Title),
"Calendar feed <atom:title> element must be an instance of " +
"atom.Title: %s" % self.calendar_event_feed.title)
# Assert that each entry has a title value which is an atom.Title
for an_entry in self.calendar_event_feed.entry:
self.assert_(isinstance(an_entry.title, atom.Title),
"Calendar event entry <atom:title> element must be an instance of " +
"atom.Title: %s" % an_entry.title)
# Assert the feed title value is as expected
self.assertEquals(self.calendar_event_feed.title.text, 'GData Ops Demo')
# Assert one of the values for title
self.assertEquals(self.calendar_event_feed.entry[0].title.text,
'test deleted')
def testPostLink(self):
"""Tests the existence of a <atom:link> with a rel='...#post'
and verifies the value"""
# Assert that each link in the feed is an atom.Link
for a_link in self.calendar_event_feed.link:
self.assert_(isinstance(a_link, atom.Link),
"Calendar event entry <atom:link> element must be an instance of " +
"atom.Link: %s" % a_link)
# Assert post link exists
self.assert_(self.calendar_event_feed.GetPostLink() is not None)
# Assert the post link value is as expected
self.assertEquals(self.calendar_event_feed.GetPostLink().href,
'http://www.google.com/calendar/feeds/default/private/full')
def testEditLink(self):
"""Tests the existence of a <atom:link> with a rel='edit' in each entry
and verifies the value"""
# Assert that each link in the feed is an atom.Link
for a_link in self.calendar_event_feed.link:
self.assert_(isinstance(a_link, atom.Link),
"Calendar event entry <atom:link> element must be an instance of " +
"atom.Link: %s" % a_link)
# Assert edit link exists
for a_entry in self.calendar_event_feed.entry:
self.assert_(a_entry.GetEditLink() is not None)
# Assert the edit link value is as expected
self.assertEquals(self.calendar_event_feed.entry[0].GetEditLink().href,
'http://www.google.com/calendar/feeds/default/private/full/o99flmgm' +
'kfkfrr8u745ghr3100/63310109397')
self.assertEquals(self.calendar_event_feed.entry[0].GetEditLink().type,
'application/atom+xml')
def testOpenSearch(self):
"""Tests the existence of <openSearch:totalResults>,
<openSearch:startIndex>, <openSearch:itemsPerPage>"""
# Assert that the elements exist and are the appropriate type
self.assert_(isinstance(self.calendar_event_feed.total_results,
gdata.TotalResults),
"Calendar event feed <openSearch:totalResults> element must be an " +
"instance of gdata.TotalResults: %s" % (
self.calendar_event_feed.total_results))
self.assert_(
isinstance(self.calendar_event_feed.start_index, gdata.StartIndex),
"Calendar event feed <openSearch:startIndex> element must be an " +
"instance of gdata.StartIndex: %s" % (
self.calendar_event_feed.start_index))
self.assert_(
isinstance(self.calendar_event_feed.items_per_page, gdata.ItemsPerPage),
"Calendar event feed <openSearch:itemsPerPage> element must be an " +
"instance of gdata.ItemsPerPage: %s" % (
self.calendar_event_feed.items_per_page))
# Assert the values for each openSearch element are as expected
self.assertEquals(self.calendar_event_feed.total_results.text, '10')
self.assertEquals(self.calendar_event_feed.start_index.text, '1')
self.assertEquals(self.calendar_event_feed.items_per_page.text, '25')
def testGenerator(self):
"""Tests the existence of <atom:generator> and verifies the value"""
# Assert that the element exists and is of the appropriate type
self.assert_(isinstance(self.calendar_event_feed.generator, atom.Generator),
"Calendar event feed <atom:generator> element must be an instance " +
"of atom.Generator: %s" % self.calendar_event_feed.generator)
# Assert the generator version, uri and text are as expected
self.assertEquals(self.calendar_event_feed.generator.text,
'Google Calendar')
self.assertEquals(self.calendar_event_feed.generator.version, '1.0')
self.assertEquals(self.calendar_event_feed.generator.uri,
'http://www.google.com/calendar')
def testCategory(self):
"""Tests the existence of <atom:category> and verifies the value"""
# Assert that the element exists and is of the appropriate type and value
for a_category in self.calendar_event_feed.category:
self.assert_(isinstance(a_category, atom.Category),
"Calendar event feed <atom:category> element must be an instance " +
"of atom.Category: %s" % a_category)
self.assertEquals(a_category.scheme,
'http://schemas.google.com/g/2005#kind')
self.assertEquals(a_category.term,
'http://schemas.google.com/g/2005#event')
for an_event in self.calendar_event_feed.entry:
for a_category in an_event.category:
self.assert_(isinstance(a_category, atom.Category),
"Calendar event feed entry <atom:category> element must be an " +
"instance of atom.Category: %s" % a_category)
self.assertEquals(a_category.scheme,
'http://schemas.google.com/g/2005#kind')
self.assertEquals(a_category.term,
'http://schemas.google.com/g/2005#event')
def testSendEventNotifications(self):
"""Test the existence of <gCal:sendEventNotifications>
and verifies the value"""
# Assert that the element exists and is of the appropriate type and value
for an_event in self.calendar_event_feed.entry:
self.assert_(isinstance(an_event.send_event_notifications,
gdata.calendar.SendEventNotifications),
("Calendar event feed entry <gCal:sendEventNotifications> element " +
"must be an instance of gdata.calendar.SendEventNotifications: %s") % (
an_event.send_event_notifications,))
# Assert the <gCal:sendEventNotifications> are as expected
self.assertEquals(
self.calendar_event_feed.entry[0].send_event_notifications.value,
'false')
self.assertEquals(
self.calendar_event_feed.entry[2].send_event_notifications.value,
'true')
def testQuickAdd(self):
"""Test the existence of <gCal:quickadd>
and verifies the value"""
entry = gdata.calendar.CalendarEventEntry()
entry.quick_add = gdata.calendar.QuickAdd(value='true')
unmarshalled_entry = entry.ToString()
tag = '{%s}quickadd' % (gdata.calendar.GCAL_NAMESPACE)
marshalled_entry = ElementTree.fromstring(unmarshalled_entry).find(tag)
self.assert_(marshalled_entry.attrib['value'],'true')
self.assert_(marshalled_entry.tag,tag)
def testEventStatus(self):
"""Test the existence of <gd:eventStatus>
and verifies the value"""
# Assert that the element exists and is of the appropriate type and value
for an_event in self.calendar_event_feed.entry:
self.assert_(isinstance(an_event.event_status,
gdata.calendar.EventStatus),
("Calendar event feed entry <gd:eventStatus> element " +
"must be an instance of gdata.calendar.EventStatus: %s") % (
an_event.event_status,))
# Assert the <gd:eventStatus> are as expected
self.assertEquals(
self.calendar_event_feed.entry[0].event_status.value,
'CANCELED')
self.assertEquals(
self.calendar_event_feed.entry[1].event_status.value,
'CONFIRMED')
def testComments(self):
"""Tests the existence of <atom:comments> and verifies the value"""
# Assert that the element exists and is of the appropriate type and value
for an_event in self.calendar_event_feed.entry:
self.assert_(an_event.comments is None or isinstance(an_event.comments,
gdata.calendar.Comments),
("Calendar event feed entry <gd:comments> element " +
"must be an instance of gdata.calendar.Comments: %s") % (
an_event.comments,))
def testVisibility(self):
"""Test the existence of <gd:visibility> and verifies the value"""
# Assert that the element exists and is of the appropriate type and value
for an_event in self.calendar_event_feed.entry:
self.assert_(isinstance(an_event.visibility,
gdata.calendar.Visibility),
("Calendar event feed entry <gd:visibility> element " +
"must be an instance of gdata.calendar.Visibility: %s") % (
an_event.visibility,))
# Assert the <gd:visibility> are as expected
self.assertEquals(
self.calendar_event_feed.entry[0].visibility.value,
'DEFAULT')
self.assertEquals(
self.calendar_event_feed.entry[1].visibility.value,
'PRIVATE')
self.assertEquals(
self.calendar_event_feed.entry[2].visibility.value,
'PUBLIC')
def testTransparency(self):
"""Test the existence of <gd:transparency> and verifies the value"""
# Assert that the element exists and is of the appropriate type and value
for an_event in self.calendar_event_feed.entry:
self.assert_(isinstance(an_event.transparency,
gdata.calendar.Transparency),
("Calendar event feed entry <gd:transparency> element " +
"must be an instance of gdata.calendar.Transparency: %s") % (
an_event.transparency,))
# Assert the <gd:transparency> are as expected
self.assertEquals(
self.calendar_event_feed.entry[0].transparency.value,
'OPAQUE')
self.assertEquals(
self.calendar_event_feed.entry[1].transparency.value,
'OPAQUE')
self.assertEquals(
self.calendar_event_feed.entry[2].transparency.value,
'OPAQUE')
    # TODO: TEST VALUES OF TRANSPARENCY OTHER THAN OPAQUE
def testWhere(self):
"""Tests the existence of a <gd:where> in the entries
and verifies the value"""
    # Assert that each entry has a where value which is a gdata.calendar.Where
for an_entry in self.calendar_event_feed.entry:
for a_where in an_entry.where:
self.assert_(isinstance(a_where, gdata.calendar.Where),
"Calendar event entry <gd:where> element must be an instance of " +
"gdata.calendar.Where: %s" % a_where)
# Assert one of the values for where is as expected
self.assertEquals(self.calendar_event_feed.entry[1].where[0].value_string,
'Dolores Park with Kim')
def testWhenAndReminder(self):
"""Tests the existence of a <gd:when> and <gd:reminder> in the entries
and verifies the values"""
# Assert that each entry's when value is a gdata.calendar.When
# Assert that each reminder is a gdata.calendar.Reminder
for an_entry in self.calendar_event_feed.entry:
for a_when in an_entry.when:
self.assert_(isinstance(a_when, gdata.calendar.When),
"Calendar event entry <gd:when> element must be an instance " +
"of gdata.calendar.When: %s" % a_when)
for a_reminder in a_when.reminder:
self.assert_(isinstance(a_reminder, gdata.calendar.Reminder),
"Calendar event entry <gd:reminder> element must be an " +
"instance of gdata.calendar.Reminder: %s" % a_reminder)
# Assert one of the values for when is as expected
self.assertEquals(self.calendar_event_feed.entry[0].when[0].start_time,
'2007-03-23T12:00:00.000-07:00')
self.assertEquals(self.calendar_event_feed.entry[0].when[0].end_time,
'2007-03-23T13:00:00.000-07:00')
# Assert the reminder child of when is as expected
self.assertEquals(
self.calendar_event_feed.entry[0].when[0].reminder[0].minutes, '10')
self.assertEquals(
self.calendar_event_feed.entry[1].when[0].reminder[0].minutes, '20')
def testBatchRequestParsing(self):
batch_request = gdata.calendar.CalendarEventFeedFromString(
test_data.CALENDAR_BATCH_REQUEST)
self.assertEquals(len(batch_request.entry), 4)
# Iterate over the batch request entries and match the operation with
# the batch id. These values are hard coded to match the test data.
for entry in batch_request.entry:
if entry.batch_id.text == '1':
self.assertEquals(entry.batch_operation.type, 'insert')
if entry.batch_id.text == '2':
self.assertEquals(entry.batch_operation.type, 'query')
if entry.batch_id.text == '3':
self.assertEquals(entry.batch_operation.type, 'update')
self.assertEquals(entry.title.text, 'Event updated via batch')
if entry.batch_id.text == '4':
self.assertEquals(entry.batch_operation.type, 'delete')
self.assertEquals(entry.id.text,
'http://www.google.com/calendar/feeds/default/'
'private/full/d8qbg9egk1n6lhsgq1sjbqffqc')
self.assertEquals(entry.GetEditLink().href,
'http://www.google.com/calendar/feeds/default/'
'private/full/d8qbg9egk1n6lhsgq1sjbqffqc/'
'63326018324')
def testBatchResponseParsing(self):
batch_response = gdata.calendar.CalendarEventFeedFromString(
test_data.CALENDAR_BATCH_RESPONSE)
self.assertEquals(len(batch_response.entry), 4)
for entry in batch_response.entry:
if entry.batch_id.text == '1':
self.assertEquals(entry.batch_operation.type, 'insert')
self.assertEquals(entry.batch_status.code, '201')
self.assertEquals(entry.batch_status.reason, 'Created')
self.assertEquals(entry.id.text, 'http://www.google.com/calendar/'
'feeds/default/private/full/'
'n9ug78gd9tv53ppn4hdjvk68ek')
if entry.batch_id.text == '2':
self.assertEquals(entry.batch_operation.type, 'query')
if entry.batch_id.text == '3':
self.assertEquals(entry.batch_operation.type, 'update')
if entry.batch_id.text == '4':
self.assertEquals(entry.batch_operation.type, 'delete')
self.assertEquals(entry.id.text, 'http://www.google.com/calendar/'
'feeds/default/private/full/'
'd8qbg9egk1n6lhsgq1sjbqffqc')
# TODO add reminder tests for absolute_time and hours/seconds (if possible)
# TODO test recurrence and recurrenceexception
# TODO test originalEvent
class CalendarWebContentTest(unittest.TestCase):
def setUp(self):
self.calendar_event_feed = (
gdata.calendar.CalendarEventFeedFromString(
test_data.CALENDAR_FULL_EVENT_FEED))
def testAddSimpleWebContentEventEntry(self):
"""Verifies that we can add a web content link to an event entry."""
title = "Al Einstein's Birthday!"
href = 'http://gdata.ops.demo.googlepages.com/birthdayicon.gif'
type = 'image/jpeg'
url = 'http://gdata.ops.demo.googlepages.com/einstein.jpg'
width = '300'
height = '225'
# Create a web content event
event = gdata.calendar.CalendarEventEntry()
web_content = gdata.calendar.WebContent(url=url, width=width, height=height)
web_content_link = gdata.calendar.WebContentLink(title=title,
href=href, link_type=type, web_content=web_content)
event.link.append(web_content_link)
# Verify the web content link exists and contains the expected data
web_content_link = event.GetWebContentLink()
self.assertValidWebContentLink(title, href, type, web_content_link)
# Verify the web content element exists and contains the expected data
web_content_element = web_content_link.web_content
self.assertValidSimpleWebContent(url, width, height, web_content_element)
def testAddWebContentGadgetEventEntry(self):
"""Verifies that we can add a web content gadget link to an event entry."""
title = "Date and Time Gadget"
href = 'http://gdata.ops.demo.googlepages.com/birthdayicon.gif'
url = 'http://google.com/ig/modules/datetime.xml'
type = 'application/x-google-gadgets+xml'
width = '300'
height = '200'
pref_name = 'color'
pref_value = 'green'
# Create a web content event
event = gdata.calendar.CalendarEventEntry()
web_content = gdata.calendar.WebContent(url=url, width=width, height=height)
web_content.gadget_pref.append(
gdata.calendar.WebContentGadgetPref(name=pref_name, value=pref_value))
web_content_link = gdata.calendar.WebContentLink(title=title,
href=href, web_content=web_content, link_type=type)
event.link.append(web_content_link)
# Verify the web content link exists and contains the expected data
web_content_link = event.GetWebContentLink()
self.assertValidWebContentLink(title, href, type, web_content_link)
# Verify the web content element exists and contains the expected data
web_content_element = web_content_link.web_content
self.assertValidWebContentGadget(url, width, height,
pref_name, pref_value, web_content_element)
def testFromXmlToSimpleWebContent(self):
"""Verifies that we can read a web content link from an event entry."""
# Expected values (from test_data.py file)
title = 'World Cup'
href = 'http://www.google.com/calendar/images/google-holiday.gif'
type = 'image/gif'
url = 'http://www.google.com/logos/worldcup06.gif'
width = '276'
height = '120'
# Note: The tenth event entry contains web content
web_content_event = self.calendar_event_feed.entry[9]
# Verify the web content link exists and contains the expected data
web_content_link = web_content_event.GetWebContentLink()
self.assertValidWebContentLink(title, href, type, web_content_link)
# Verify the web content element exists and contains the expected data
web_content_element = web_content_link.web_content
self.assertValidSimpleWebContent(url, width, height, web_content_element)
def testFromXmlToWebContentGadget(self):
"""Verifies that we can read a web content link from an event entry."""
# Expected values (from test_data.py file)
title = 'Date and Time Gadget'
href = 'http://gdata.ops.demo.googlepages.com/birthdayicon.gif'
url = 'http://google.com/ig/modules/datetime.xml'
type = 'application/x-google-gadgets+xml'
width = '300'
height = '136'
pref_name = 'color'
pref_value = 'green'
# Note: The eleventh event entry contains web content
web_content_event = self.calendar_event_feed.entry[10]
# Verify the web content link exists and contains the expected data
web_content_link = web_content_event.GetWebContentLink()
self.assertValidWebContentLink(title, href, type, web_content_link)
# Verify the web content element exists and contains the expected data
web_content_element = web_content_link.web_content
self.assertValidWebContentGadget(url, width, height, pref_name,
pref_value, web_content_element)
def assertValidWebContentLink(self, expected_title=None, expected_href=None,
expected_type=None, web_content_link=None):
"""Asserts that the web content link is the correct type and contains the
expected values"""
self.assert_(isinstance(web_content_link, gdata.calendar.WebContentLink),
"Web content link element must be an " +
"instance of gdata.calendar.WebContentLink: %s" % web_content_link)
expected_rel = '%s/%s' % (gdata.calendar.GCAL_NAMESPACE, 'webContent')
self.assertEquals(expected_rel, web_content_link.rel)
self.assertEqual(expected_title, web_content_link.title)
self.assertEqual(expected_href, web_content_link.href)
self.assertEqual(expected_type, web_content_link.type)
def assertValidSimpleWebContent(self, expected_url=None, expected_width=None,
expected_height=None, web_content_element=None):
"""Asserts that the web content element is the correct type and contains
the expected values"""
self.assert_(isinstance(web_content_element, gdata.calendar.WebContent),
"Calendar event entry <gCal:webContent> element must be an " +
"instance of gdata.calendar.WebContent: %s" % web_content_element)
self.assertEquals(expected_width, web_content_element.width)
self.assertEquals(expected_height, web_content_element.height)
self.assertEquals(expected_url, web_content_element.url)
def assertValidWebContentGadget(self, expected_url=None, expected_width=None,
expected_height=None, expected_pref_name=None, expected_pref_value=None,
web_content_element=None):
"""Asserts that the web content element is the correct type and contains
the expected values"""
self.assert_(isinstance(web_content_element, gdata.calendar.WebContent),
"Calendar event entry <gCal:webContent> element must be an " +
"instance of gdata.calendar.WebContent: %s" % web_content_element)
self.assertEquals(expected_width, web_content_element.width)
self.assertEquals(expected_height, web_content_element.height)
self.assertEquals(expected_url, web_content_element.url)
self.assertEquals(expected_pref_name,
web_content_element.gadget_pref[0].name)
self.assertEquals(expected_pref_value,
web_content_element.gadget_pref[0].value)
class ExtendedPropertyTest(unittest.TestCase):
def testExtendedPropertyToAndFromXml(self):
ep = gdata.calendar.ExtendedProperty(name='test')
ep.value = 'val'
xml_string = ep.ToString()
ep2 = gdata.ExtendedPropertyFromString(xml_string)
self.assertEquals(ep.name, ep2.name)
self.assertEquals(ep.value, ep2.value)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
goulu/Goulib | tests/test_Goulib_itertools2.py | 1 | 16319 | #!/usr/bin/env python
# coding: utf8
from nose.tools import assert_equal, assert_not_equals
from nose import SkipTest
#lines above are inserted automatically by pythoscope. Line below overrides them
from Goulib.tests import *
from Goulib.itertools2 import *
class TestTake:
def test_take(self):
assert_equal(take(3, irange(1,10)),[1,2,3])
class TestIndex:
def test_index(self):
assert_equal(index(4, irange(1,10)),3)
assert_equal(index(9, irange(1,10)),8)
class TestFirst:
def test_first(self):
assert_equal(first(irange(1,10)),1)
assert_equal(first('abc'),'a')
class TestLast:
def test_last(self):
assert_equal(last(irange(1,10)),10)
class TestTakeEvery:
def test_take_every(self):
assert_equal(every(2, irange(1,10)),[1,3,5,7,9])
assert_equal(takeevery(3,irange(1,10)), [1,4,7,10])
class TestDrop:
def test_drop(self):
assert_equal(drop(5, irange(1,10)),[6,7,8,9,10])
class TestIlen:
def test_ilen(self):
assert_equal(ilen(irange(10,0)),0)
assert_equal(ilen(irange(11,20)),10)
class TestIrange:
def test_irange(self):
assert_equal(irange(1,5),[1,2,3,4,5])
class TestArange:
def test_arange(self):
assert_equal(arange(-1,2.5,.5),[-1,-0.5,0,0.5,1,1.5,2])
assert_equal(arange(2,-1.5,.5),reversed([-1,-0.5,0,0.5,1,1.5,2]))
l=list(arange(1,step=.01))
assert_equal(len(l),100)
class TestLinspace:
def test_linspace(self):
assert_equal(linspace(-1,2,7),[-1,-0.5,0,0.5,1,1.5,2])
assert_equal(linspace(1,1,7),[1,1,1,1,1,1,1])
assert_equal(linspace((1,0),(0,1),3),[(1,0),(.5,.5),(0,1)])
class TestFlatten:
def test_flatten(self):
f=list(flatten([[1,2],[3]]))
assert_equal(f,[1,2,3])
assert_equal(flatten([1,[2,[3]]]),[1,2,3])
assert_equal(flatten(['a',['bc']]),['a','bc']) #do not recurse in strings
assert_equal(flatten([[[1],(2,[3])]],(tuple)),[1,(2,[3])]) # do not recurse in tuple
d=dict(enumerate(range(10)))
assert_equal(flatten(d),range(10))
class TestGroups:
def test_groups(self):
assert_equal(groups(irange(1,6),3,2),[[1,2,3],[3,4,5]])
assert_equal(groups([1,2,3,4,5,6],3,2),[[1,2,3],[3,4,5]])
assert_equal(groups([1,2,3,4,5,6],3),[[1,2,3],[4,5,6]])
assert_equal(groups([1,2,3,4,5,6],4),[[1,2,3,4]])
class TestReshape:
def test_reshape(self):
data=[1,[2,[3,4],[5,6,7]]] #data can have any shape...
assert_equal(reshape(data,(2,3)),[[1,2,3],[4,5,6]])
assert_equal(reshape(data,(3,2)),[[1,2],[3,4],[5,6]])
assert_equal(reshape(data,(3,3)),[[1,2,3],[4,5,6],[7]])
class TestCompose:
def test_compose(self):
from math import sin
f=compose(sin, lambda x:x*x)
assert_equal(f(2),sin(4))
class TestIterate:
def test_iterate(self):
assert_equal(take(4,iterate(lambda x:x*x, 2)), [2,4,16,16*16])
class TestIsIterable:
def test_isiterable(self):
assert_false(isiterable(123))
assert_false(isiterable('a string'))
assert_true(isiterable([]))
assert_true(isiterable(tuple()))
assert_true(isiterable({}))
assert_true(isiterable(set()))
assert_true(isiterable((x for x in range(10))))
assert_true(isiterable(map(lambda x:x*x,[1,2,3])))
class TestTails:
def test_tails(self):
assert_equal(tails([1,2,3]),[[1,2,3], [2,3], [3], []])
class TestIreduce:
def test_ireduce(self):
import operator
assert_equal(ireduce(operator.add, irange(10)),[1,3,6,10,15,21,28,36,45,55])
assert_equal(ireduce(operator.add, irange(10),2),[2,2,3,5,8,12,17,23,30,38,47,57])
class TestCompress:
def test_compress(self):
assert_equal(compress('AAAABBBCCDAABBB'),[('A', 4),('B', 3),('C', 2),('D', 1),('A', 2),('B', 3)])
# https://www.linkedin.com/groups/25827/25827-6166706414627627011
res=compress('aaaaabbbbccccccaaaaaaa')
res=''.join('%d%s'%(n,c) for (c,n) in res)
assert_equal(res,'5a4b6c7a')
class TestDecompress:
def test_decompress(self):
data='aaaaabbbbccccccaaaaaaa';
res=compress(data)
data2=decompress(res)
assert_equal(data2,data)
class TestUnique:
def test_unique(self):
assert_equal(unique('AAAABBBCCDAABBB'),'ABCD')
assert_equal(unique('ABBCcAD', str.upper),'ABCD')
assert_equal(unique('ZZZZBBBCCDAABBB',buffer=1),'ZBCDAB')
# harmless regression ...
# s=list(unique('AAAABBBCCDAABBB',buffer=4))
# assert_equal(s,'ABCD')
class TestIdentity:
def test_identity(self):
x=object()
assert_equal(identity(x),x)
class TestAny:
def test_any(self):
assert_true(any((1,2,3,4),lambda x:x>3))
assert_false(any((1,2,3,4),lambda x:x>4))
class TestAll:
def test_all(self):
assert_true(all((1,2,3,4),lambda x:x<5))
assert_false(all((1,2,3,4),lambda x:x<4))
class TestNo:
def test_no(self):
assert_true(no((1,2,3,4),lambda x:x<1))
assert_false(no((1,2,3,4),lambda x:x<2))
class TestTakenth:
def test_takenth(self):
#http://stackoverflow.com/questions/12007820/better-ways-to-get-nth-element-from-an-unsubscriptable-iterable
from itertools import permutations
assert_equal(nth(1000,permutations(range(10), 10)),
(0, 1, 2, 4, 6, 5, 8, 9, 3, 7)
)
class TestIcross:
def test_icross(self):
assert_equal(icross([1,2,5],[2,3]),
[(1,2),(1,3),(2,2),(2,3),(5,2),(5,3)]
)
class TestQuantify:
def test_quantify(self):
from Goulib.math2 import is_pentagonal
assert_equal(quantify(irange(1,100), is_pentagonal),8)
class TestPairwise:
def test_pairwise(self):
assert_equal(pairwise([1,2,3]),[(1,2),(2,3)])
assert_equal(pairwise([1,2,3],operator.add),[3,5])
assert_equal(pairwise([1,2,3],loop=True),[(1,2),(2,3),(3,1)])
assert_equal(pairwise([1,2,3],operator.add,loop=True),[3,5,4])
assert_equal(pairwise([]),[])
assert_equal(pairwise([1]),[])
assert_equal(pairwise([1],loop=True),[(1,1)])
class TestInterleave:
def test_interleave(self):
assert_equal(interleave([0,2,4],[1,3,5]),[0,1,2,3,4,5])
assert_equal(interleave([0,2,4],[1,3]),[0,1,2,3,4])
assert_equal(interleave([0],[]),[0])
class TestRandSeq:
def test_rand_seq(self):
# assert_equal(expected, rand_seq(size))
raise SkipTest
class TestAllPairs:
def test_all_pairs(self):
# assert_equal(expected, all_pairs(size))
raise SkipTest
class TestFilter2:
def test_filter2(self):
yes,no=filter2([1,2,3,4,3,2,1],lambda x:x<3)
assert_equal(yes,[1,2,2,1])
assert_equal(no,[3,4,3])
class TestIfind:
def test_ifind(self):
pass #tested below
class TestFind:
def test_find(self):
assert_equal(find([0,1,2,3,4],lambda x:x>2),(3,3))
class TestIsplit:
def test_isplit(self):
pass #tested below
class TestSplit:
def test_split(self):
assert_equal(split([0,1,2,-1,3,4,5], lambda x:x<0),[[0,1,2],[3,4,5]])
assert_equal(split([-1,0,1,2,-1,3,4,5,-1], lambda x:x<0),[[],[0,1,2],[3,4,5],[]])
assert_equal(split([-1,0,1,2,-1,3,4,5,-1], lambda x:x<0,True),[[],[-1,0,1,2],[-1,3,4,5],[-1]])
class TestNextPermutation:
def test_next_permutation(self):
res=take(10,next_permutation(list('hello')))
res=[''.join(x) for x in res]
res=','.join(res)
assert_equal(res,'hello,helol,heoll,hlelo,hleol,hlleo,hlloe,hloel,hlole,hoell')
class TestIter2:
def test___add__(self):
i1 = iter2(irange(1,5))
i2 = iter2(irange(6,10))
assert_equal(i1+i2,range(1,11))
def test___init__(self):
# iter2 = iter2(iterable)
raise SkipTest
def test___iter__(self):
# iter2 = iter2(iterable)
# assert_equal(expected, iter2.__iter__())
raise SkipTest
def test_append(self):
# iter2 = iter2(iterable)
# assert_equal(expected, iter2.append(iterable))
raise SkipTest
def test_insert(self):
# iter2 = iter2(iterable)
# assert_equal(expected, iter2.insert(place, iterable))
raise SkipTest
def test_next(self):
# iter2 = iter2(iterable)
# assert_equal(expected, iter2.next())
raise SkipTest
def test___next__(self):
# iter2 = iter2(iterable)
# assert_equal(expected, iter2.__next__())
raise SkipTest
class TestProduct:
def test_product(self):
#test compatibility with itertools.product
assert_equal(itertools2.product(),itertools.product())
assert_equal(itertools2.product([]),itertools.product([]))
assert_equal(itertools2.product('ABCD', 'xy'),itertools.product('ABCD', 'xy'))
# assert_equal(itertools2.product('AB', 'wxyz'),itertools.product('AB', 'wxyz'))
assert_equal(itertools2.product(range(2), repeat=3),itertools.product(range(2), repeat=3))
#test case from http://stackoverflow.com/questions/12093364/cartesian-product-of-large-iterators-itertools
g = product(itertools.permutations(range(100)),repeat=2)
assert_equal(next(g),(range(100),range(100)))
class TestCombinationsWithReplacement:
def test_combinations_with_replacement(self):
assert_equal(combinations_with_replacement('ABC', 2),
['AA','AB','BB','AC','BC','CC'])
assert_equal(combinations_with_replacement('AB', 4),
['AAAA','AAAB','AABB','ABBB','BBBB'])
class TestCountUnique:
def test_count_unique(self):
assert_equal(count_unique('AAAABBBCCDAABBB'),4)
assert_equal(count_unique('ABBCcAD', str.lower),4)
class TestBest:
def test_best(self):
assert_equal(best([3,2,1,2,1]),[1,1])
assert_equal(best([3,2,1,2,1],reverse=True,n=2),[3,2,2])
class TestRemovef:
def test_removef(self):
l=[0,1,'a',None,3.14,[]]
r=removef(l,lambda x:True if not x else False)
assert_equal(r,[0,None,[]])
assert_equal(l,[1,'a',3.14])
class TestShuffle:
def test_shuffle(self):
s1=list("hello world")
s2=shuffle(list("hello world")) #copy, as shuffle works in place
assert_not_equal(s1,s2) #would really be bad luck ...
assert_equal(occurences(s1),occurences(s2))
class TestIndexMin:
def test_index_min(self):
assert_equal(index_min("hallo~welt"),(1,'a'))
class TestIndexMax:
def test_index_max(self):
assert_equal(index_max("hello world"),(6,'w'))
class TestTakeevery:
def test_takeevery(self):
# assert_equal(expected, takeevery(n, iterable))
raise SkipTest
class TestSortIndexes:
def test_sort_indexes(self):
# assert_equal(expected, sort_indexes(iterable, key, reverse))
raise SkipTest
class TestSubdict:
def test_subdict(self):
# assert_equal(expected, subdict(d, keys))
raise SkipTest
class TestAccumulate:
def test_accumulate(self):
# assert_equal(expected, accumulate(iterable, func, skip_first))
raise SkipTest
class TestDiff:
def test_diff(self):
# assert_equal(expected, diff(iterable1, iterable2))
raise SkipTest
class TestSortedIterable:
def test_sorted_iterable(self):
data=[1,2,3,7,6,5,4]
res=sorted(data)
#with a small buffer, it fails
def test(iterable,buffer,key=None):
return [x for x in ensure_sorted(
sorted_iterable(iterable,key=key, buffer=buffer)
,key=key)]
assert_raises(SortingError,test,data,3)
#with a larger one, it's ok
assert_equal(test(data,buffer=4),res)
class TestIsiterable:
def test_isiterable(self):
assert_true(isiterable(list()))
assert_true(isiterable(tuple()))
assert_true(isiterable(range(1000)))
assert_false(isiterable(''))
class TestItemgetter:
def test_itemgetter(self):
# assert_equal(expected, itemgetter(iterable, i))
raise SkipTest
class TestTee:
def test_tee(self):
it=itertools.count()
it,it1,it2=tee(it,n=3)
assert_equal(next(it1),next(it2))
assert_equal(next(it1),next(it2))
assert_equal(next(it),0)
class TestIremove:
def test_iremove(self):
# assert_equal(expected, iremove(iterable, f))
raise SkipTest
class TestDictsplit:
def test_dictsplit(self):
# assert_equal(expected, dictsplit(dic, keys))
raise SkipTest
class TestShape:
def test_shape(self):
data=[[[5,6,7],2,[3,4]],1] #data can have any shape...
assert_equal(shape(data),(2,3,3)) #... but shape is evaluated from [0]
class TestNdim:
def test_ndim(self):
data=[[[5,6,7],2,[3,4]],1] #data can have any shape...
assert_equal(ndim(data),3) #... but shape is evaluated from [0]
class TestEnumerates:
def test_enumerates(self):
r=range(10)
d=dict(enumerate(r))
assert_equal(enumerates(d),enumerates(r))
class TestEnsureSorted:
def test_ensure_sorted(self):
# assert_equal(expected, ensure_sorted(iterable, key))
raise SkipTest # implement your test here
class TestIscallable:
def test_iscallable(self):
# assert_equal(expected, iscallable(f))
raise SkipTest # implement your test here
class TestIntersect:
def test_intersect(self):
# http://stackoverflow.com/questions/969709/joining-a-set-of-ordered-integer-yielding-python-iterators
postings = [[1, 100, 142, 322, 12312],
[2, 100, 101, 322, 1221],
[100, 142, 322, 956, 1222]]
assert_equal(intersect(*postings),[100, 322])
class TestKeep:
@classmethod
def setup_class(self):
l=[1,2,3,4,5,6,7,8,9]
k=keep(l)
kl=list(k)
assert_equal(kl,l)
assert_equal(k.val,l[-1])
def test___init__(self):
pass #tested in test_detect_cycle
def test___iter__(self):
pass #tested in test_detect_cycle
def test_next(self):
pass #tested in test_detect_cycle
def test___next__(self):
# keep = keep(iterable)
# assert_equal(expected, keep.__next__())
raise SkipTest # implement your test here
class TestFirstMatch:
def test_first_match(self):
pass #tested in test_detect_cycle
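# detect_cycle returns a (start_index, cycle_length) pair; floyd() is the
# tortoise-and-hare variant and needs the repeated block to occur enough times
# before it can lock onto the cycle, as the assertions below illustrate.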
class TestDetectCycle:
def test_detect_cycle(self):
assert_equal(detect_cycle(list('123412341')),(0,4))
assert_equal(detect_cycle(list('012345'+'678'*4)),(6,3))
assert_equal(detect_cycle(list('012345'+'678'*3)),(6,3))
#Floyd fails when repetition isn't long enough (2*i ?):
assert_equal(floyd(list('012345'+'678'*3)),(None,None))
#test from https://rosettacode.org/wiki/Cycle_detection
assert_equal(detect_cycle([3,10,101,2,5,26,167,95,101,2,5,26,167,95]),(2,6))
"""does not work yet because of repeating digits
p3=[1, 1, 2, 0, 2, 2, 1, 0, 1, 1, 2, 0, 2, 2, 1, 0, 1, 1, 2, 0, 2, 2,
1, 0, 1, 1, 2, 0, 2, 2, 1, 0, 1, 1, 2, 0, 2, 2, 1, 0, 1, 1, 2, 0, 2,
2, 1, 0, 1, 1, 2, 0, 2, 2, 1, 0, 1, 1, 2, 0, 2, 2, 1, 0, 1, 1, 2, 0,
2, 2, 1, 0, 1, 1, 2, 0, 2, 2, 1, 0, 1, 1, 2, 0, 2, 2, 1, 0, 1, 1, 2,
0, 2, 2, 1, 0, 1, 1, 2, 0, 2, 2]
assert_equal(detect_cycle(p3)[1],8)
"""
from Goulib.math2 import pi_digits_gen
assert_equal(detect_cycle(pi_digits_gen()),(1,2)) # same problem ..
class TestFloyd:
def test_floyd(self):
# assert_equal(expected, floyd(iterable, limit))
raise SkipTest # implement your test here
if __name__ == "__main__":
runmodule()
| lgpl-3.0 |
joyider/op_mon | tests/test_functional.py | 1 | 3998 | # -*- coding: utf-8 -*-
"""Functional tests using WebTest.
See: http://webtest.readthedocs.org/
"""
from flask import url_for
from op_mon.user.models import User
from .factories import UserFactory
class TestLoggingIn:
"""Login."""
def test_can_log_in_returns_200(self, user, testapp):
"""Login successful."""
# Goes to homepage
res = testapp.get('/')
# Fills out login form in navbar
form = res.forms['loginForm']
form['username'] = user.username
form['password'] = 'myprecious'
# Submits
res = form.submit().follow()
assert res.status_code == 200
def test_sees_alert_on_log_out(self, user, testapp):
"""Show alert on logout."""
res = testapp.get('/')
# Fills out login form in navbar
form = res.forms['loginForm']
form['username'] = user.username
form['password'] = 'myprecious'
# Submits
res = form.submit().follow()
res = testapp.get(url_for('public.logout')).follow()
# sees alert
assert 'You are logged out.' in res
def test_sees_error_message_if_password_is_incorrect(self, user, testapp):
"""Show error if password is incorrect."""
# Goes to homepage
res = testapp.get('/')
# Fills out login form, password incorrect
form = res.forms['loginForm']
form['username'] = user.username
form['password'] = 'wrong'
# Submits
res = form.submit()
# sees error
assert 'Invalid password' in res
def test_sees_error_message_if_username_doesnt_exist(self, user, testapp):
"""Show error if username doesn't exist."""
# Goes to homepage
res = testapp.get('/')
# Fills out login form, password incorrect
form = res.forms['loginForm']
form['username'] = 'unknown'
form['password'] = 'myprecious'
# Submits
res = form.submit()
# sees error
assert 'Unknown user' in res
class TestRegistering:
"""Register a user."""
def test_can_register(self, user, testapp):
"""Register a new user."""
old_count = len(User.query.all())
# Goes to homepage
res = testapp.get('/')
# Clicks Create Account button
res = res.click('Create account')
# Fills out the form
form = res.forms['registerForm']
form['username'] = 'foobar'
form['email'] = '[email protected]'
form['password'] = 'secret'
form['confirm'] = 'secret'
# Submits
res = form.submit().follow()
assert res.status_code == 200
# A new user was created
assert len(User.query.all()) == old_count + 1
def test_sees_error_message_if_passwords_dont_match(self, user, testapp):
"""Show error if passwords don't match."""
# Goes to registration page
res = testapp.get(url_for('public.register'))
# Fills out form, but passwords don't match
form = res.forms['registerForm']
form['username'] = 'foobar'
form['email'] = '[email protected]'
form['password'] = 'secret'
form['confirm'] = 'secrets'
# Submits
res = form.submit()
# sees error message
assert 'Passwords must match' in res
def test_sees_error_message_if_user_already_registered(self, user, testapp):
"""Show error if user already registered."""
user = UserFactory(active=True) # A registered user
user.save()
# Goes to registration page
res = testapp.get(url_for('public.register'))
# Fills out form, but username is already registered
form = res.forms['registerForm']
form['username'] = user.username
form['email'] = '[email protected]'
form['password'] = 'secret'
form['confirm'] = 'secret'
# Submits
res = form.submit()
# sees error
assert 'Username already registered' in res
| bsd-3-clause |
disqus/zumanji | src/zumanji/views.py | 1 | 6969 | from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import transaction
from django.http import HttpResponseRedirect, HttpResponseForbidden
from django.shortcuts import render, get_object_or_404
from django.utils import simplejson
from django.views.decorators.csrf import csrf_protect, csrf_exempt
from functools import wraps
from zumanji.forms import UploadJsonForm
from zumanji.helpers import get_trace_data, get_changes, get_git_changes
from zumanji.models import Project, Build, BuildTag, Test
from zumanji.importer import import_build
NOTSET = object()
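# Auth guard for the upload endpoint: a request that supplies an 'api_key'
# parameter must match ZUMANJI_CONFIG['API_KEY'] (and is rejected when no key
# is configured); requests without one fall back to Django's CSRF protection.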
def api_auth(func):
@wraps(func)
def wrapped(request, *args, **kwargs):
if request.REQUEST.get('api_key'):
if request.REQUEST['api_key'] != settings.ZUMANJI_CONFIG.get('API_KEY', NOTSET):
return HttpResponseForbidden('Invalid api_key')
return func(request, *args, **kwargs)
return csrf_protect(func)(request, *args, **kwargs)
return csrf_exempt(wrapped)
def index(request):
build_qs = Build.objects.order_by('-revision__datetime', '-datetime').select_related('revision')
project_list = []
# lol O(N)
for project in Project.objects.all():
try:
latest_build = build_qs.filter(project=project)[0]
except IndexError:
latest_build = None
project_list.append((project, latest_build))
return render(request, 'zumanji/index.html', {
'project_list': project_list,
})
def view_project(request, project_label):
project = get_object_or_404(Project, label=project_label)
build_list = list(Build.objects
.filter(project=project)
.order_by('-revision__datetime', '-datetime')
.select_related('revision', 'project'))
return render(request, 'zumanji/project.html', {
'project': project,
'build_list': build_list,
})
def view_tag(request, project_label, tag_id):
project = get_object_or_404(Project, label=project_label)
tag = get_object_or_404(BuildTag, pk=tag_id)
build_list = list(Build.objects
.filter(project=project, tags=tag)
.order_by('-datetime')
.select_related('revision', 'project'))
return render(request, 'zumanji/tag.html', {
'project': project,
'tag': tag,
'build_list': build_list,
})
def view_build(request, project_label, build_id, tag_id=None):
filter_args = dict(project__label=project_label, id=build_id)
tag = None
if tag_id:
tag = get_object_or_404(BuildTag, id=tag_id)
filter_args["tags"] = tag
build = get_object_or_404(Build, **filter_args)
project = build.project
previous_build = build.get_previous_build(tag=tag)
next_build = build.get_next_build(tag=tag)
test_list = list(build.test_set
.filter(parent__isnull=True)
.order_by('-upper90_duration'))
compare_with = request.GET.get('compare_with')
if compare_with:
try:
compare_build = Build.objects.get(project__label=project_label, id=compare_with)
except Build.DoesNotExist:
compare_build = None
else:
compare_build = previous_build
changes = get_changes(compare_build, test_list)
if compare_build:
git_changes = get_git_changes(build, compare_build)
else:
git_changes = None
return render(request, 'zumanji/build.html', {
'project': project,
'tag': tag,
'build': build,
'previous_build': previous_build,
'compare_build': compare_build,
'next_build': next_build,
'test_list': test_list,
'changes': changes,
'git_changes': git_changes,
})
def view_test(request, project_label, build_id, test_label):
test = get_object_or_404(Test, project__label=project_label, build=build_id, label=test_label)
project = test.project
build = test.build
test_list = list(Test.objects.filter(parent=test)
.order_by('-upper90_duration')
.select_related('parent'))
# this is actually a <Test>
previous_test_by_build = test.get_test_in_previous_build()
next_test_by_build = test.get_test_in_next_build()
breadcrumbs = [
(reverse('zumanji:view_build', kwargs={'project_label': project.label, 'build_id': build.id}), 'Build #%s' % build.id)
]
last = ''
for node in test.get_context():
node_label = node.label[len(last):]
breadcrumbs.append(
(reverse('zumanji:view_test', kwargs={
'project_label': project.label,
'build_id': build.id,
'test_label': node.label,
}), node_label)
)
last = node.label + '.' # include the dot
previous_builds = test.get_previous_builds(50)
compare_with = request.GET.get('compare_with')
if compare_with:
try:
compare_build = Build.objects.get(project__label=project_label, id=compare_with)
except Build.DoesNotExist:
compare_build = None
else:
compare_build = previous_test_by_build.build if previous_test_by_build else None
if compare_build:
try:
compare_test = compare_build.test_set.get(label=test.label)
except Test.DoesNotExist:
compare_test = None
git_changes = get_git_changes(build, compare_build)
else:
compare_test = None
git_changes = None
trace_results = get_trace_data(test, compare_test)
if previous_test_by_build:
tests_to_check = test_list
changes = get_changes(compare_build, tests_to_check)
else:
changes = []
return render(request, 'zumanji/test.html', {
'breadcrumbs': breadcrumbs,
'project': project,
'build': build,
'previous_test_by_build': previous_test_by_build,
'next_test_by_build': next_test_by_build,
'previous_builds': previous_builds,
'test': test,
'test_list': test_list,
'changes': changes,
'compare_build': compare_build,
'trace_results': trace_results,
'git_changes': git_changes,
})
@api_auth
@transaction.commit_on_success
def upload_project_build(request, project_label):
project = get_object_or_404(Project, label=project_label)
form = UploadJsonForm(request.POST or None, request.FILES or None)
if form.is_valid():
data = simplejson.loads(request.FILES['json_file'].read())
try:
build = import_build(data, project=project.label, revision=form.cleaned_data.get('revision'))
except Exception, e:
form.errors['json_file'] = unicode(e)
else:
return HttpResponseRedirect(reverse('zumanji:view_build', kwargs={
'project_label': project.label, 'build_id': build.id}))
return render(request, 'zumanji/upload_build.html', {
'project': project,
'form': form,
})
| apache-2.0 |
fernandog/Medusa | ext/boto/logs/layer1.py | 146 | 22588 | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.logs import exceptions
from boto.compat import json
class CloudWatchLogsConnection(AWSQueryConnection):
"""
Amazon CloudWatch Logs Service API Reference
This is the Amazon CloudWatch Logs API Reference . Amazon
CloudWatch Logs is a managed service for real time monitoring and
archival of application logs. This guide provides detailed
information about Amazon CloudWatch Logs actions, data types,
parameters, and errors. For detailed information about Amazon
CloudWatch Logs features and their associated API calls, go to the
`Amazon CloudWatch Logs Developer Guide`_.
Use the following links to get started using the Amazon CloudWatch
API Reference :
+ `Actions`_: An alphabetical list of all Amazon CloudWatch Logs
actions.
+ `Data Types`_: An alphabetical list of all Amazon CloudWatch
Logs data types.
+ `Common Parameters`_: Parameters that all Query actions can use.
+ `Common Errors`_: Client and server errors that all actions can
return.
+ `Regions and Endpoints`_: Itemized regions and endpoints for all
AWS products.
In addition to using the Amazon CloudWatch Logs API, you can also
use the following SDKs and third-party libraries to access Amazon
CloudWatch Logs programmatically.
+ `AWS SDK for Java Documentation`_
+ `AWS SDK for .NET Documentation`_
+ `AWS SDK for PHP Documentation`_
+ `AWS SDK for Ruby Documentation`_
Developers in the AWS developer community also provide their own
libraries, which you can find at the following AWS developer
centers:
+ `AWS Java Developer Center`_
+ `AWS PHP Developer Center`_
+ `AWS Python Developer Center`_
+ `AWS Ruby Developer Center`_
+ `AWS Windows and .NET Developer Center`_
"""
APIVersion = "2014-03-28"
DefaultRegionName = "us-east-1"
DefaultRegionEndpoint = "logs.us-east-1.amazonaws.com"
ServiceName = "CloudWatchLogs"
TargetPrefix = "Logs_20140328"
ResponseError = JSONResponseError
_faults = {
"LimitExceededException": exceptions.LimitExceededException,
"DataAlreadyAcceptedException": exceptions.DataAlreadyAcceptedException,
"ResourceInUseException": exceptions.ResourceInUseException,
"ServiceUnavailableException": exceptions.ServiceUnavailableException,
"InvalidParameterException": exceptions.InvalidParameterException,
"ResourceNotFoundException": exceptions.ResourceNotFoundException,
"ResourceAlreadyExistsException": exceptions.ResourceAlreadyExistsException,
"OperationAbortedException": exceptions.OperationAbortedException,
"InvalidSequenceTokenException": exceptions.InvalidSequenceTokenException,
}
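    # Maps the '__type' discriminator found in JSON error responses to the
    # exception classes raised from make_request() below.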
def __init__(self, **kwargs):
region = kwargs.pop('region', None)
if not region:
region = RegionInfo(self, self.DefaultRegionName,
self.DefaultRegionEndpoint)
if 'host' not in kwargs or kwargs['host'] is None:
kwargs['host'] = region.endpoint
super(CloudWatchLogsConnection, self).__init__(**kwargs)
self.region = region
def _required_auth_capability(self):
return ['hmac-v4']
def create_log_group(self, log_group_name):
"""
Creates a new log group with the specified name. The name of
the log group must be unique within a region for an AWS
account. You can create up to 100 log groups per account.
You must use the following guidelines when naming a log group:
+ Log group names can be between 1 and 512 characters long.
+ Allowed characters are az, AZ, 09, '_' (underscore), '-'
(hyphen), '/' (forward slash), and '.' (period).
Log groups are created with a default retention of 14 days.
        The retention attribute allows you to configure the number of
days you want to retain log events in the specified log group.
See the `SetRetention` operation on how to modify the
retention of your log groups.
:type log_group_name: string
:param log_group_name:
"""
params = {'logGroupName': log_group_name, }
return self.make_request(action='CreateLogGroup',
body=json.dumps(params))
def create_log_stream(self, log_group_name, log_stream_name):
"""
Creates a new log stream in the specified log group. The name
of the log stream must be unique within the log group. There
is no limit on the number of log streams that can exist in a
log group.
You must use the following guidelines when naming a log
stream:
+ Log stream names can be between 1 and 512 characters long.
+ The ':' colon character is not allowed.
:type log_group_name: string
:param log_group_name:
:type log_stream_name: string
:param log_stream_name:
"""
params = {
'logGroupName': log_group_name,
'logStreamName': log_stream_name,
}
return self.make_request(action='CreateLogStream',
body=json.dumps(params))
def delete_log_group(self, log_group_name):
"""
Deletes the log group with the specified name. Amazon
CloudWatch Logs will delete a log group only if there are no
log streams and no metric filters associated with the log
group. If this condition is not satisfied, the request will
fail and the log group will not be deleted.
:type log_group_name: string
:param log_group_name:
"""
params = {'logGroupName': log_group_name, }
return self.make_request(action='DeleteLogGroup',
body=json.dumps(params))
def delete_log_stream(self, log_group_name, log_stream_name):
"""
Deletes a log stream and permanently deletes all the archived
log events associated with it.
:type log_group_name: string
:param log_group_name:
:type log_stream_name: string
:param log_stream_name:
"""
params = {
'logGroupName': log_group_name,
'logStreamName': log_stream_name,
}
return self.make_request(action='DeleteLogStream',
body=json.dumps(params))
def delete_metric_filter(self, log_group_name, filter_name):
"""
Deletes a metric filter associated with the specified log
group.
:type log_group_name: string
:param log_group_name:
:type filter_name: string
:param filter_name: The name of the metric filter.
"""
params = {
'logGroupName': log_group_name,
'filterName': filter_name,
}
return self.make_request(action='DeleteMetricFilter',
body=json.dumps(params))
def delete_retention_policy(self, log_group_name):
"""
:type log_group_name: string
:param log_group_name:
"""
params = {'logGroupName': log_group_name, }
return self.make_request(action='DeleteRetentionPolicy',
body=json.dumps(params))
def describe_log_groups(self, log_group_name_prefix=None,
next_token=None, limit=None):
"""
Returns all the log groups that are associated with the AWS
account making the request. The list returned in the response
is ASCII-sorted by log group name.
By default, this operation returns up to 50 log groups. If
there are more log groups to list, the response would contain
a `nextToken` value in the response body. You can also limit
the number of log groups returned in the response by
specifying the `limit` parameter in the request.
:type log_group_name_prefix: string
:param log_group_name_prefix:
:type next_token: string
:param next_token: A string token used for pagination that points to
the next page of results. It must be a value obtained from the
response of the previous `DescribeLogGroups` request.
:type limit: integer
:param limit: The maximum number of items returned in the response. If
you don't specify a value, the request would return up to 50 items.
"""
params = {}
if log_group_name_prefix is not None:
params['logGroupNamePrefix'] = log_group_name_prefix
if next_token is not None:
params['nextToken'] = next_token
if limit is not None:
params['limit'] = limit
return self.make_request(action='DescribeLogGroups',
body=json.dumps(params))
def describe_log_streams(self, log_group_name,
log_stream_name_prefix=None, next_token=None,
limit=None):
"""
Returns all the log streams that are associated with the
specified log group. The list returned in the response is
ASCII-sorted by log stream name.
By default, this operation returns up to 50 log streams. If
there are more log streams to list, the response would contain
a `nextToken` value in the response body. You can also limit
the number of log streams returned in the response by
specifying the `limit` parameter in the request.
:type log_group_name: string
:param log_group_name:
:type log_stream_name_prefix: string
:param log_stream_name_prefix:
:type next_token: string
:param next_token: A string token used for pagination that points to
the next page of results. It must be a value obtained from the
response of the previous `DescribeLogStreams` request.
:type limit: integer
:param limit: The maximum number of items returned in the response. If
you don't specify a value, the request would return up to 50 items.
"""
params = {'logGroupName': log_group_name, }
if log_stream_name_prefix is not None:
params['logStreamNamePrefix'] = log_stream_name_prefix
if next_token is not None:
params['nextToken'] = next_token
if limit is not None:
params['limit'] = limit
return self.make_request(action='DescribeLogStreams',
body=json.dumps(params))
def describe_metric_filters(self, log_group_name,
filter_name_prefix=None, next_token=None,
limit=None):
"""
Returns all the metrics filters associated with the specified
log group. The list returned in the response is ASCII-sorted
by filter name.
By default, this operation returns up to 50 metric filters. If
there are more metric filters to list, the response would
contain a `nextToken` value in the response body. You can also
limit the number of metric filters returned in the response by
specifying the `limit` parameter in the request.
:type log_group_name: string
:param log_group_name:
:type filter_name_prefix: string
:param filter_name_prefix: The name of the metric filter.
:type next_token: string
:param next_token: A string token used for pagination that points to
the next page of results. It must be a value obtained from the
response of the previous `DescribeMetricFilters` request.
:type limit: integer
:param limit: The maximum number of items returned in the response. If
you don't specify a value, the request would return up to 50 items.
"""
params = {'logGroupName': log_group_name, }
if filter_name_prefix is not None:
params['filterNamePrefix'] = filter_name_prefix
if next_token is not None:
params['nextToken'] = next_token
if limit is not None:
params['limit'] = limit
return self.make_request(action='DescribeMetricFilters',
body=json.dumps(params))
def get_log_events(self, log_group_name, log_stream_name,
start_time=None, end_time=None, next_token=None,
limit=None, start_from_head=None):
"""
Retrieves log events from the specified log stream. You can
provide an optional time range to filter the results on the
event `timestamp`.
        By default, this operation returns as many log events as can
fit in a response size of 1MB, up to 10,000 log events. The
response will always include a `nextForwardToken` and a
`nextBackwardToken` in the response body. You can use any of
these tokens in subsequent `GetLogEvents` requests to paginate
through events in either forward or backward direction. You
can also limit the number of log events returned in the
response by specifying the `limit` parameter in the request.
:type log_group_name: string
:param log_group_name:
:type log_stream_name: string
:param log_stream_name:
:type start_time: long
        :param start_time: A point in time expressed as the number of milliseconds
since Jan 1, 1970 00:00:00 UTC.
:type end_time: long
        :param end_time: A point in time expressed as the number of milliseconds
since Jan 1, 1970 00:00:00 UTC.
:type next_token: string
:param next_token: A string token used for pagination that points to
the next page of results. It must be a value obtained from the
`nextForwardToken` or `nextBackwardToken` fields in the response of
the previous `GetLogEvents` request.
:type limit: integer
:param limit: The maximum number of log events returned in the
response. If you don't specify a value, the request would return as
much log events as can fit in a response size of 1MB, up to 10,000
log events.
:type start_from_head: boolean
:param start_from_head:
"""
params = {
'logGroupName': log_group_name,
'logStreamName': log_stream_name,
}
if start_time is not None:
params['startTime'] = start_time
if end_time is not None:
params['endTime'] = end_time
if next_token is not None:
params['nextToken'] = next_token
if limit is not None:
params['limit'] = limit
if start_from_head is not None:
params['startFromHead'] = start_from_head
return self.make_request(action='GetLogEvents',
body=json.dumps(params))
def put_log_events(self, log_group_name, log_stream_name, log_events,
sequence_token=None):
"""
Uploads a batch of log events to the specified log stream.
Every PutLogEvents request must include the `sequenceToken`
obtained from the response of the previous request. An upload
in a newly created log stream does not require a
`sequenceToken`.
The batch of events must satisfy the following constraints:
+ The maximum batch size is 32,768 bytes, and this size is
calculated as the sum of all event messages in UTF-8, plus 26
bytes for each log event.
+ None of the log events in the batch can be more than 2 hours
in the future.
+ None of the log events in the batch can be older than 14
days or the retention period of the log group.
        + The log events in the batch must be chronologically ordered
by their `timestamp`.
+ The maximum number of log events in a batch is 1,000.
:type log_group_name: string
:param log_group_name:
:type log_stream_name: string
:param log_stream_name:
:type log_events: list
:param log_events: A list of events belonging to a log stream.
:type sequence_token: string
:param sequence_token: A string token that must be obtained from the
response of the previous `PutLogEvents` request.
"""
params = {
'logGroupName': log_group_name,
'logStreamName': log_stream_name,
'logEvents': log_events,
}
if sequence_token is not None:
params['sequenceToken'] = sequence_token
return self.make_request(action='PutLogEvents',
body=json.dumps(params))
def put_metric_filter(self, log_group_name, filter_name, filter_pattern,
metric_transformations):
"""
Creates or updates a metric filter and associates it with the
specified log group. Metric filters allow you to configure
rules to extract metric data from log events ingested through
`PutLogEvents` requests.
:type log_group_name: string
:param log_group_name:
:type filter_name: string
:param filter_name: The name of the metric filter.
:type filter_pattern: string
:param filter_pattern:
:type metric_transformations: list
:param metric_transformations:
"""
params = {
'logGroupName': log_group_name,
'filterName': filter_name,
'filterPattern': filter_pattern,
'metricTransformations': metric_transformations,
}
return self.make_request(action='PutMetricFilter',
body=json.dumps(params))
def put_retention_policy(self, log_group_name, retention_in_days):
"""
:type log_group_name: string
:param log_group_name:
:type retention_in_days: integer
:param retention_in_days: Specifies the number of days you want to
retain log events in the specified log group. Possible values are:
1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 547, 730.
"""
params = {
'logGroupName': log_group_name,
'retentionInDays': retention_in_days,
}
return self.make_request(action='PutRetentionPolicy',
body=json.dumps(params))
def set_retention(self, log_group_name, retention_in_days):
"""
Sets the retention of the specified log group. Log groups are
created with a default retention of 14 days. The retention
        attribute allows you to configure the number of days you want
to retain log events in the specified log group.
:type log_group_name: string
:param log_group_name:
:type retention_in_days: integer
:param retention_in_days: Specifies the number of days you want to
retain log events in the specified log group. Possible values are:
1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 547, 730.
"""
params = {
'logGroupName': log_group_name,
'retentionInDays': retention_in_days,
}
return self.make_request(action='SetRetention',
body=json.dumps(params))
def test_metric_filter(self, filter_pattern, log_event_messages):
"""
Tests the filter pattern of a metric filter against a sample
of log event messages. You can use this operation to validate
the correctness of a metric filter pattern.
:type filter_pattern: string
:param filter_pattern:
:type log_event_messages: list
:param log_event_messages:
"""
params = {
'filterPattern': filter_pattern,
'logEventMessages': log_event_messages,
}
return self.make_request(action='TestMetricFilter',
body=json.dumps(params))
def make_request(self, action, body):
headers = {
'X-Amz-Target': '%s.%s' % (self.TargetPrefix, action),
'Host': self.region.endpoint,
'Content-Type': 'application/x-amz-json-1.1',
'Content-Length': str(len(body)),
}
http_request = self.build_base_http_request(
method='POST', path='/', auth_path='/', params={},
headers=headers, data=body)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
return json.loads(response_body)
else:
json_body = json.loads(response_body)
fault_name = json_body.get('__type', None)
exception_class = self._faults.get(fault_name, self.ResponseError)
raise exception_class(response.status, response.reason,
body=json_body)
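# ---------------------------------------------------------------------------
# Illustrative usage sketch. This is not part of boto itself: the log group and
# stream names are made-up, credentials are assumed to come from the standard
# boto configuration, and the 'nextSequenceToken' key is assumed to appear in
# the parsed PutLogEvents response as described in the docstrings above.
def _example_put_and_get_log_events():
    import time
    conn = CloudWatchLogsConnection()
    conn.create_log_group('example-group')
    conn.create_log_stream('example-group', 'example-stream')
    # Timestamps are milliseconds since the Unix epoch.
    first = {'timestamp': int(time.time() * 1000), 'message': 'hello from boto'}
    resp = conn.put_log_events('example-group', 'example-stream', [first])
    # Every subsequent upload must carry the sequence token returned by the
    # previous PutLogEvents call.
    token = resp.get('nextSequenceToken')
    second = {'timestamp': int(time.time() * 1000), 'message': 'second event'}
    conn.put_log_events('example-group', 'example-stream', [second],
                        sequence_token=token)
    return conn.get_log_events('example-group', 'example-stream')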
| gpl-3.0 |
ZwEin27/phone-number-matcher | dig_phone_extractor.py | 1 | 23737 | # -*- coding: utf-8 -*-
# @Author: ZwEin
# @Date: 2016-06-21 12:36:47
# @Last Modified by: ZwEin
# @Last Modified time: 2016-09-29 21:54:12
import os
import re
import sys
import json
import copy
import types
import string
import collections
import phonenumbers
from datetime import datetime
from crf_tokenizer import CrfTokenizer
from urlparse import urlparse
from string import maketrans
from phonenumbers.phonenumberutil import NumberParseException
from difflib import SequenceMatcher
def is_valid_datetime(raw, date_format):
try:
datetime.strptime(raw, date_format)
return True
except ValueError:
return False
class Preprocessor():
re_prep = re.compile(r'[\(\)]')
reg_simple_format = [
r'(?:(?<=[ \A\b-\.\?])\d{3}[ \?\.-]\d{3}[ \?\.-]\d{4}(?=[ \Z\b-\.\?]))'
]
re_simple_format = re.compile(r'(?:'+r'|'.join(reg_simple_format)+r')')
datetime_regexes = [
r"(?:\d{2}[ _-]\d{2}[ _-]\d{4})",
r"(?:\d{4}[ _-]\d{2}[ _-]\d{2})"
]
datetime_regex = r"(?:" + r"|".join(datetime_regexes) + ")"
re_datetime_regex = re.compile(datetime_regex)
re_digits_regex = re.compile(r"\d+")
def prep_datetime(self, raw):
m = Preprocessor.re_datetime_regex.findall(raw)
for d in m:
dd = ''.join(Preprocessor.re_digits_regex.findall(d))
if is_valid_datetime(dd, '%Y%m%d') or is_valid_datetime(dd, '%m%d%Y'):
raw = raw.replace(d, "")
return raw
money_regex = r"(?:(?<=[\D])\$\d+(?=[\W_]))"
units = ['lbs', 'kg', 'hour', 'hr', 'hh']
unit_regex = r"(?:\d+[\s\W]*(" + r"|".join(units) + "))"
others_regexes = [
r"24/7",
r"#\d+",
r"\d+\'\d+",
r"(?<=[\W_])\d{5}[\W_]{1,}\d{5}(?=[\W_])",
r"- {1,}\d+$",
r"\d+\%"
]
other_regex = r"(?:" + "|".join(others_regexes) + ")"
all_regexes = [money_regex, unit_regex, other_regex]
all_regex = r"(" + r"|".join(all_regexes) + ")"
re_all_regex = re.compile(all_regex)
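    # preprocess() lowercases the text, drops non-ASCII characters, strips
    # dates, prices and unit/count expressions matched by the patterns above,
    # and wraps bare ddd ddd dddd style numbers in 'pnwrapper' sentinels so
    # later stages can recognise them.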
def preprocess(self, raw):
raw = raw.lower()
raw = raw.encode('ascii', 'ignore')
raw = self.prep_datetime(raw)
raw = Preprocessor.re_prep.sub(' ', raw)
raw = Preprocessor.re_all_regex.sub('', raw)
raw = Preprocessor.re_simple_format.sub('pnwrapper \g<0> pnwrapper', raw)
return raw
SOURCE_TYPE_TEXT = 'text'
SOURCE_TYPE_URL = 'url'
class Tokenizer():
re_2_digts_only_in_url_regex = re.compile(r'(?<=[-_])\d{2}(?=[_/])')
re_all_alphabet_in_url_regex = re.compile(r'\w+')
def __init__(self, source_type='text'):
self.set_source_type(source_type)
def set_source_type(self, source_type):
"""
'text' or 'url'
"""
        st = source_type.lower()
        if st not in [SOURCE_TYPE_TEXT, SOURCE_TYPE_URL]:
            raise Exception(source_type + ' is not a source type, which should be "text" or "url"')
        self.source_type = st
def remove_punctuation(self, raw):
return raw.translate(string.maketrans("",""), string.punctuation)
def tokenize(self, raw):
result = None
if self.source_type == SOURCE_TYPE_TEXT:
result = self.tokenize_text(raw)
elif self.source_type == SOURCE_TYPE_URL:
result = self.tokenize_url(raw)
return ' '.join(result.split())
def tokenize_text(self, raw):
t = CrfTokenizer()
t.setRecognizeHtmlEntities(True)
t.setRecognizeHtmlTags(True)
t.setSkipHtmlTags(True)
t.setRecognizePunctuation(True)
tokens = t.tokenize(raw)
tokens = ' '.join(tokens)
tokens = self.remove_punctuation(tokens)
return tokens
def tokenize_url(self, raw):
SEPARATOR = ' '
url_obj = urlparse(raw)
# parse netloc
netloc = url_obj.netloc.split('.')[:-2] # get rid of port numbers, ext and domain name
# parse path
path = url_obj.path
path = Tokenizer.re_2_digts_only_in_url_regex.sub('', path)
path = path.split('/')
content = netloc + path
content = [SEPARATOR.join(Tokenizer.re_all_alphabet_in_url_regex.findall(_)) for _ in content]
# parse params
# url_obj.params
# parse query
# url_obj.query
return ' sep '.join(content)
class Cleaner():
def prep_misspelled_numeral_words(self, raw):
misspelling_dict = {
"th0usand": "thousand",
"th1rteen": "thirteen",
"f0urteen": "fourteen",
"e1ghteen": "eighteen",
"n1neteen": "nineteen",
"f1fteen": "fifteen",
"s1xteen": "sixteen",
"th1rty": "thirty",
"e1ghty": "eighty",
"n1nety": "ninety",
"fourty": "forty",
"f0urty": "forty",
"e1ght": "eight",
"f0rty": "forty",
"f1fty": "fifty",
"s1xty": "sixty",
"zer0": "zero",
"for": "four",
"f0ur": "four",
"f1ve": "five",
"n1ne": "nine",
"0ne": "one",
"too": "two",
"tw0": "two",
"to": "two",
"s1x": "six"
}
for key in misspelling_dict.keys():
raw = raw.replace(key, misspelling_dict[key])
return raw
numbers = ['zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine']
re_twenty_x = re.compile(r"(two|twenty[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_thirty_x = re.compile(r"(three|thirty[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_forty_x = re.compile(r"(four|forty[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_fifty_x = re.compile(r"(five|fifty[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_sixty_x = re.compile(r"(six|sixty[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_seventy_x = re.compile(r"(seven|seventy[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_eighty_x = re.compile(r"(eight|eighty[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_ninety_x = re.compile(r"(nine|ninety[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_ten = re.compile(r"(?<=[ilo0-9])ten(?=[ \b0-9])")
re_one = re.compile(r'(?:(?<=([0-9yneorxt]| ))one|(?:(?<=[ils])[i]((?=[ils])|$)))')
re_zero = re.compile(r'(?:zero|oh|(?:(?<=[0-9])(o+?))|(?:o(?=[0-9]))|(?:(?<=[o\s])o(?=[o\s])))')
def prep_replace_numeral_words(self, raw):
raw = raw.replace("hundred", "00")
raw = raw.replace("thousand", "000")
raw = raw.replace("eleven", "11")
raw = raw.replace("twelve", "12")
raw = raw.replace("thirteen", "13")
raw = raw.replace("fourteen", "14")
raw = raw.replace("fifteen", "15")
raw = raw.replace("sixteen", "16")
raw = raw.replace("seventeen", "17")
raw = raw.replace("eighteen", "18")
raw = raw.replace("nineteen", "19")
raw = Cleaner.re_twenty_x.sub("2", raw)
raw = Cleaner.re_thirty_x.sub("3", raw)
raw = Cleaner.re_forty_x.sub("4", raw)
raw = Cleaner.re_fifty_x.sub("5", raw)
raw = Cleaner.re_sixty_x.sub("6", raw)
raw = Cleaner.re_seventy_x.sub("7", raw)
raw = Cleaner.re_eighty_x.sub("8", raw)
raw = Cleaner.re_ninety_x.sub("9", raw)
raw = Cleaner.re_ten.sub("10", raw)
raw = Cleaner.re_one.sub("1", raw)
raw = Cleaner.re_zero.sub("0", raw)
raw = raw.replace("twenty", "20")
raw = raw.replace("thirty", "30")
raw = raw.replace("forty", "40")
raw = raw.replace("fifty", "50")
raw = raw.replace("sixty", "60")
raw = raw.replace("seventy", "70")
raw = raw.replace("eighty", "80")
raw = raw.replace("ninety", "90")
return raw
def clean(self, raw):
raw = self.prep_misspelled_numeral_words(raw)
raw = self.prep_replace_numeral_words(raw)
# print raw
return raw
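# The cleaner targets obfuscated numbers such as "2sixseven one9zerofor": the
# misspelling map is applied first (e.g. "for" -> "four", "zer0" -> "zero") and
# the numeral words are then rewritten as digits so the extractor sees digit runs.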
class ZEExtractor():
def __init__(self):
pass
prefix = r'(?:(?<=[\A\b\sa-zA-Z])|^)'
# prefix = r'\b'
# prefix = r'[ ]?'
postfix = r'(?:(?=[\Z\b\sa-zA-Z])|$)'
# postfix = r'\b'
# postfix = r'[ ]?'
phone_number_format_regex = [
r'(?:'+prefix+r"\d{10,13}"+postfix+r')',
r'(?:'+prefix+r"\d{9,10}"+postfix+r')',
r'(?:'+prefix+r"\d{8}[ ]\d{3,4}"+postfix+r')',
r'(?:'+prefix+r"\d{7}[ ]\d{3,4}"+postfix+r')',
r'(?:'+prefix+r"\d{6}[ ]\d{4}"+postfix+r')',
r'(?:'+prefix+r"\d{5}[ ]\d{6}"+postfix+r')',
r'(?:'+prefix+r"\d{5}[ ]\d{4}[ ]\d{4}"+postfix+r')',
r'(?:'+prefix+r"\d{5}[ ]\d{4}"+postfix+r')',
r'(?:'+prefix+r"\d{5}[ ]\d{4}[ ]\d{2}[ ]\d{2}"+postfix+r')',
r'(?:'+prefix+r"\d{4}[ ]\d{4}[ ]\d{2}"+postfix+r')',
r'(?:'+prefix+r"\d{4}[ ]\d{2}[ ]\d{2}[ ]\d{2}[ ]\d{2}"+postfix+r')',
r'(?:'+prefix+r"\d{4}[ ]\d{3}[ ]\d{3}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{7,8}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{4}[ ]\d{4}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{4}[ ]\d{3}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{3}[ ]\d{4}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{3}[ ]\d{3}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{3}[ ]\d{2}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{3}[ ]\d{1}[ ]\d{3}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{4}"+postfix+r')',
r'(?:'+prefix+r"\d{2}[ ]\d{4}[ ]\d{4}"+postfix+r')',
r'(?:'+prefix+r"\d{2}[ ]\d{8}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{8}[ ]\d{1}"+postfix+r')', # \d{2}[ ] ...
r'(?:'+prefix+r"\d{1}[ ]\d{3}[ ]\d{3}[ ]\d{3}"+postfix+r')',
r'(?:'+prefix+r"\d{2}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{2}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{2}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{2}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{2}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{2}[ ]\d{1}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{2}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{2}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{2}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}"+postfix+r')'
]
# numbers_regex = r"(?:" + r"|".join(phone_number_format_regex) + r")"
numbers_regex = r"(?:" + r"|".join(phone_number_format_regex) + r")"
re_numbers_regex = re.compile(numbers_regex)
# print numbers_regex
def extract(self, raw):
raw = ZEExtractor.re_numbers_regex.findall(raw)
raw = [''.join(_.split()) for _ in raw if len(_.strip()) >= 10]
return '\t'.join(raw)
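# extract() returns a tab-separated string of candidates: every match that is at
# least 10 characters long (spaces included) is kept, with internal spaces removed.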
class Validator():
re_zero = re.compile(r'0{3,}')
def validate_phone_number_with_country_code(self, raw, country_code='US'):
try:
z = phonenumbers.parse(raw, country_code)
except NumberParseException, e:
pass
"""
if e.error_type == NumberParseException.INVALID_COUNTRY_CODE:
# Invalid country code specified
return []
elif e.error_type == NumberParseException.NOT_A_NUMBER:
# The string passed in had fewer than 3 digits in it.
# The number failed to match the regular expression
return []
elif e.error_type == NumberParseException.TOO_SHORT_AFTER_IDD:
# The string started with an international dialing prefix
# but after this was removed, it had fewer digits than any
# valid phone number (including country code) could have.
return []
elif e.error_type == NumberParseException.TOO_SHORT_NSN:
# After any country code has been stripped, the string
# had fewer digits than any valid phone number could have.
return []
elif e.error_type == NumberParseException.TOO_LONG:
# String had more digits than any valid phone number could have
return []
"""
# print e.error_type, e._msg
else:
if phonenumbers.is_possible_number(z) and phonenumbers.is_valid_number(z):
return [raw]
else:
return []
def validate_phone_number(self, raw):
# match all countries if using area_code.get_all_country_iso_two_letter_code()
# may include too short phone numbers if use 'DE'
country_code_list = ['US', 'CN', 'IN', 'UA', 'JP', 'RU', 'IT', 'DE', 'CA', 'TR']
for country_code in country_code_list:
rtn = self.validate_phone_number_with_country_code(raw, country_code=country_code)
if rtn:
return rtn
def is_datetime(self, raw):
size = len(raw)
if size == 14:
return is_valid_datetime(raw, '%Y%m%d%H%M%S')
elif size == 8:
return is_valid_datetime(raw, '%Y%m%d')
elif size == 6:
return is_valid_datetime(raw, '%Y%m%d') or is_valid_datetime(raw, '%H%M%S')
else:
return False
re_num_digits = [
None,
re.compile(r"\d{1}"),
re.compile(r"\d{2}"),
re.compile(r"\d{3}"),
re.compile(r"\d{4}"),
re.compile(r"\d{5}"),
re.compile(r"\d{6}")
]
def is_all_dup_digits(self, raw):
for i in range(1, 6):
rtn = Validator.re_num_digits[i].findall(raw)
if len(raw) % i != 0:
continue
if all(rtn[0] == rest for rest in rtn):
return True
return False
re_start_zero = re.compile(r'^0+')
def suggest_most_overlap(self, extracted_phone_list):
def similar(a, b):
return SequenceMatcher(None, a, b).ratio()
potential_invalid, potential_valid = [], []
for pn in extracted_phone_list:
if len(pn) == 10:
potential_valid.append(pn)
else:
potential_invalid.append(pn)
ans = list(potential_valid)
for pi in potential_invalid:
if any(similar(pi, pv) < .3 for pv in potential_valid):
ans.append(pi)
return ans
def validate(self, raw):
ans = []
for nums in raw.split('\t'):
nums = nums.strip()
nums = Validator.re_start_zero.sub('', nums)
if len(nums) > 16:
continue
if len(Validator.re_zero.findall(nums)):
continue
if self.is_all_dup_digits(nums):
continue
if self.is_datetime(nums):
continue
ans += [nums]
# valid = self.validate_phone_number(nums)
# if valid:
# ans.extend(valid)
ans = list(set(ans))
ans = self.suggest_most_overlap(ans)
return ' '.join(ans)
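# validate() strips leading zeros, then drops candidates that are longer than 16
# digits, contain a run of three or more zeros, repeat a single digit block, or
# parse as a date/time; the phonenumbers-based country check is commented out.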
class Normalizer():
# try extracting from this one live escort reviews pnwrapper 754 307 7279 pnwrapper 49 91 3524432077 you won t be disappointedangel
re_digits = re.compile(r'(?:(?<=[ \s\b\Aa-zA-Z])[\d ]+(?=[ \s\b\Za-zA-Z]))')
def normalize(self, cleaned_output, uncleaned_output, output_format='list'):
# print [_.strip() for _ in Normalizer.re_digits.findall(tokenized_content) if _.strip() != '']
if output_format == 'obfuscation':
output = []
for co in cleaned_output.split():
phonenum = {}
phonenum['telephone'] = co
if co in uncleaned_output:
phonenum['obfuscation'] = 'False'
else:
phonenum['obfuscation'] = 'True'
output.append(phonenum)
return output
else:
return cleaned_output.split()
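# In 'obfuscation' output format each number carries a flag: obfuscation is
# 'False' when the number also appears in the un-cleaned output, and 'True' when
# it only became visible after numeral-word cleaning.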
class PhoneNumberExtractor(object):
PN_OUTPUT_FORMAT_LIST = 'list'
PN_OUTPUT_FORMAT_OBFUSCATION = 'obfuscation'
def __init__(self, _output_format='list'):
self.preprocessor = Preprocessor()
self.tokenizer = Tokenizer(source_type='text')
self.extractor = ZEExtractor()
self.cleaner = Cleaner()
self.validator = Validator()
self.normalizer = Normalizer()
self.set_output_format(_output_format)
def set_output_format(self, _output_format):
# 1. list, 2. obfuscation
if _output_format not in [PhoneNumberExtractor.PN_OUTPUT_FORMAT_LIST, PhoneNumberExtractor.PN_OUTPUT_FORMAT_OBFUSCATION]:
raise Exception('output_format should be "list" or "obfuscation"')
self.output_format = _output_format
def do_process(self, content, source_type='text', do_preprocess=True, do_tokenize=True, do_clean=True, do_extract=True, do_validate=True):
if do_preprocess:
content = self.preprocessor.preprocess(content)
if do_tokenize:
self.tokenizer.set_source_type(source_type)
content = self.tokenizer.tokenize(content)
if do_clean:
content = self.cleaner.clean(content)
if do_extract:
content = self.extractor.extract(content)
if do_validate:
content = self.validator.validate(content)
return content
def match(self, content, source_type='text'):
cleaned_ans = self.do_process(content, source_type=source_type)
uncleaned_ans = self.do_process(content, source_type=source_type, do_clean=False)
return self.normalizer.normalize(cleaned_ans, uncleaned_ans, output_format=self.output_format)
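# Minimal usage sketch (mirrors the sample block at the bottom of this module):
# extractor = PhoneNumberExtractor(_output_format='obfuscation')
# extractor.match('call me at 754 307 7279', source_type='text')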
########################################################################
# URLExtractor
########################################################################
import esm
import idna
import tldextract
re_dot = re.compile(r'(?:\s+?dot\s+?)', re.IGNORECASE)
reg_url_charactor = '[a-z0-9-.]'
re_url_charactor = re.compile(reg_url_charactor, re.IGNORECASE)
re_pretld = re.compile(reg_url_charactor+'+?$', re.IGNORECASE)
re_posttld = re.compile(':?[0-9]*[/[!#$&-;=?a-z_]+]?', re.IGNORECASE)
class URLExtractor(object):
def __init_tld_index():
tldindex = esm.Index()
tlds = (tldextract.TLDExtract()._get_tld_extractor().tlds)
for tld in tlds:
tldindex.enter('.' + tld.encode('idna'))
tldindex.fix()
return tldindex
tldindex = __init_tld_index()
@staticmethod
def preprocess(text):
def clean(text):
text = re_dot.sub('.', text)
return text
text = clean(text)
return text
@staticmethod
def query(text):
ans = []
exts = URLExtractor.tldindex.query(text)
for ext in exts:
pretld, posttld = None, None
url = ''
tld = ext[1]
startpt, endpt = ext[0][0], ext[0][1]
if len(text) > endpt:
nextcharacter = text[endpt]
if re_url_charactor.match(nextcharacter):
continue
posttld = re_posttld.match(text[endpt:])
pretld = re_pretld.search(text[:startpt])
if pretld:
url = pretld.group(0)
startpt -= len(pretld.group(0))
url += tld
if posttld:
url += posttld.group(0)
endpt += len(posttld.group(0))
url = url.rstrip(',.')
ans.append(url)
ans = list(set([_ for _ in ans if _]))
return ans
@staticmethod
def extract(text):
text = text.encode('ascii', 'ignore')
text = URLExtractor.preprocess(text)
ans = URLExtractor.query(text)
return ans
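# URLExtractor locates known TLD suffixes with an esm string-matching index built
# from tldextract's TLD list, then expands left over host characters and right
# over path/query characters to recover the full URL around each hit.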
# in production
# from digExtractor.extractor import Extractor
# in test
class Extractor:
def extract(self, doc):
raise NotImplementedError("Need to implement extract function")
# should create a new dictionary each time
def get_metadata(self):
raise NotImplementedError("Need to implement get_metadata function")
def set_metadata(self, metadata):
raise NotImplementedError("Need to implement set_metadata function")
def get_renamed_input_fields(self):
raise NotImplementedError("Need to implement get_renamed_input_fields function")
def set_renamed_input_fields(self, renamed_input_fields):
if not (isinstance(renamed_input_fields, basestring) or isinstance(renamed_input_fields, types.ListType)):
raise ValueError("renamed_input_fields must be a string or a list")
self.renamed_input_fields = renamed_input_fields
return self
class PhoneExtractor(Extractor):
def __init__(self):
self.renamed_input_fields = '' # ? renamed_input_fields
def extract(self, doc):
urls = URLExtractor.extract(doc)
extractor = PhoneNumberExtractor()
extracts = []
for url in urls:
extracts += extractor.match(url, source_type='url')
doc = doc.replace(url, '')
extracts += extractor.match(doc, source_type='text')
return extracts
def get_metadata(self):
return copy.copy(self.metadata)
def set_metadata(self, metadata):
self.metadata = metadata
return self
def get_renamed_input_fields(self):
return self.renamed_input_fields
def set_renamed_input_fields(self, renamed_input_fields):
if not (isinstance(renamed_input_fields, basestring) or isinstance(renamed_input_fields, types.ListType)):
raise ValueError("renamed_input_fields must be a string or a list")
self.renamed_input_fields = renamed_input_fields
return self
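# PhoneExtractor.extract first harvests URLs, matches numbers inside them with the
# URL tokenizer, strips those URLs from the document, and finally matches the
# remaining free text.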
if __name__ == '__main__':
doc = "71857376 71857376718 test 71857376719 718573767185 71837376718 71981090718 718573767198 719810907185 71857376150 1171857376 http://costarica.backpage.com/BodyRubs/hoy-cerramos-a-las-11-71857376/2909373 Sexy new girl in town searching for a great date wiff u Naughty fresh girl here searching 4 a great date wiff you Sweet new girl in town seeking for a good date with u for80 2sixseven one9zerofor 90hr incall or out call"
pe = PhoneExtractor()
print pe.extract(doc)
"""
# Samples
# from phone_number_extractor import PhoneNumberExtractor
extractor = PhoneNumberExtractor()
url_string = "http://costarica.backpage.com/BodyRubs/hoy-cerramos-a-las-11-71857376/2909373"
url_phone_numbers = extractor.match(url_string, source_type='url')
print url_phone_numbers
# text_string = "Sexy new girl in town searching for a great date wiff u Naughty fresh girl here searching 4 a great date wiff you Sweet new girl in town seeking for a good date with u for80 2sixseven one9zerofor 90hr incall or out call"
text_string = "71857376 71857376718 test 71857376719 718573767185 71837376718 71981090718 718573767198 719810907185 71857376150 1171857376"
text_phone_numbers = extractor.match(text_string, source_type='text')
print text_phone_numbers
"""
| apache-2.0 |
ampax/edx-platform | common/test/acceptance/tests/lms/test_lms_cohorted_courseware_search.py | 13 | 14570 | """
Test courseware search
"""
import json
import uuid
from ..helpers import remove_file
from ...pages.common.logout import LogoutPage
from ...pages.studio.overview import CourseOutlinePage
from ...pages.lms.courseware_search import CoursewareSearchPage
from ...pages.lms.staff_view import StaffPage
from ...fixtures.course import XBlockFixtureDesc
from nose.plugins.attrib import attr
from ..studio.base_studio_test import ContainerBase
from ...pages.studio.settings_group_configurations import GroupConfigurationsPage
from ...pages.studio.auto_auth import AutoAuthPage as StudioAutoAuthPage
from ...fixtures import LMS_BASE_URL
from ...pages.studio.component_editor import ComponentVisibilityEditorView
from ...pages.lms.instructor_dashboard import InstructorDashboardPage
from bok_choy.promise import EmptyPromise
@attr('shard_1')
class CoursewareSearchCohortTest(ContainerBase):
"""
Test courseware search.
"""
TEST_INDEX_FILENAME = "test_root/index_file.dat"
def setUp(self, is_staff=True):
"""
Create search page and course content to search
"""
# create test file in which index for this test will live
with open(self.TEST_INDEX_FILENAME, "w+") as index_file:
json.dump({}, index_file)
self.addCleanup(remove_file, self.TEST_INDEX_FILENAME)
super(CoursewareSearchCohortTest, self).setUp(is_staff=is_staff)
self.staff_user = self.user
self.course_outline = CourseOutlinePage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.content_group_a = "Content Group A"
self.content_group_b = "Content Group B"
# Create a student who will be in "Cohort A"
self.cohort_a_student_username = "cohort_a_" + str(uuid.uuid4().hex)[:12]
self.cohort_a_student_email = self.cohort_a_student_username + "@example.com"
StudioAutoAuthPage(
self.browser, username=self.cohort_a_student_username, email=self.cohort_a_student_email, no_login=True
).visit()
# Create a student who will be in "Cohort B"
self.cohort_b_student_username = "cohort_b_" + str(uuid.uuid4().hex)[:12]
self.cohort_b_student_email = self.cohort_b_student_username + "@example.com"
StudioAutoAuthPage(
self.browser, username=self.cohort_b_student_username, email=self.cohort_b_student_email, no_login=True
).visit()
# Create a student who will end up in the default cohort group
self.cohort_default_student_username = "cohort_default_student"
self.cohort_default_student_email = "[email protected]"
StudioAutoAuthPage(
self.browser, username=self.cohort_default_student_username,
email=self.cohort_default_student_email, no_login=True
).visit()
self.courseware_search_page = CoursewareSearchPage(self.browser, self.course_id)
# Enable Cohorting and assign cohorts and content groups
self._auto_auth(self.staff_user["username"], self.staff_user["email"], True)
self.enable_cohorting(self.course_fixture)
self.create_content_groups()
self.link_html_to_content_groups_and_publish()
self.create_cohorts_and_assign_students()
self._studio_reindex()
def _auto_auth(self, username, email, staff):
"""
Logout and login with given credentials.
"""
LogoutPage(self.browser).visit()
StudioAutoAuthPage(self.browser, username=username, email=email,
course_id=self.course_id, staff=staff).visit()
def _studio_reindex(self):
"""
Reindex course content on studio course page
"""
self._auto_auth(self.staff_user["username"], self.staff_user["email"], True)
self.course_outline.visit()
self.course_outline.start_reindex()
self.course_outline.wait_for_ajax()
def _goto_staff_page(self):
"""
Open staff page with assertion
"""
self.courseware_search_page.visit()
staff_page = StaffPage(self.browser, self.course_id)
self.assertEqual(staff_page.staff_view_mode, 'Staff')
return staff_page
def populate_course_fixture(self, course_fixture):
"""
Populate the children of the test course fixture.
"""
self.group_a_html = 'GROUPACONTENT'
self.group_b_html = 'GROUPBCONTENT'
self.group_a_and_b_html = 'GROUPAANDBCONTENT'
self.visible_to_all_html = 'VISIBLETOALLCONTENT'
course_fixture.add_children(
XBlockFixtureDesc('chapter', 'Test Section').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
XBlockFixtureDesc('vertical', 'Test Unit').add_children(
XBlockFixtureDesc('html', self.group_a_html, data='<html>GROUPACONTENT</html>'),
XBlockFixtureDesc('html', self.group_b_html, data='<html>GROUPBCONTENT</html>'),
XBlockFixtureDesc('html', self.group_a_and_b_html, data='<html>GROUPAANDBCONTENT</html>'),
XBlockFixtureDesc('html', self.visible_to_all_html, data='<html>VISIBLETOALLCONTENT</html>')
)
)
)
)
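# The four HTML blocks above are later restricted by link_html_to_content_groups_and_publish:
# GROUPACONTENT to Content Group A, GROUPBCONTENT to Content Group B,
# GROUPAANDBCONTENT to both groups, and VISIBLETOALLCONTENT stays visible to everyone.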
def enable_cohorting(self, course_fixture):
"""
Enables cohorting for the current course.
"""
url = LMS_BASE_URL + "/courses/" + course_fixture._course_key + '/cohorts/settings' # pylint: disable=protected-access
data = json.dumps({'is_cohorted': True})
response = course_fixture.session.patch(url, data=data, headers=course_fixture.headers)
self.assertTrue(response.ok, "Failed to enable cohorts")
def create_content_groups(self):
"""
Creates two content groups in Studio Group Configurations Settings.
"""
group_configurations_page = GroupConfigurationsPage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
group_configurations_page.visit()
group_configurations_page.create_first_content_group()
config = group_configurations_page.content_groups[0]
config.name = self.content_group_a
config.save()
group_configurations_page.add_content_group()
config = group_configurations_page.content_groups[1]
config.name = self.content_group_b
config.save()
def link_html_to_content_groups_and_publish(self):
"""
Updates 3 of the 4 existing HTML components to limit their visibility by content group.
Publishes the modified units.
"""
container_page = self.go_to_unit_page()
def set_visibility(html_block_index, content_group, second_content_group=None):
"""
Set visibility on html blocks to specified groups.
"""
html_block = container_page.xblocks[html_block_index]
html_block.edit_visibility()
if second_content_group:
ComponentVisibilityEditorView(self.browser, html_block.locator).select_option(
second_content_group, save=False
)
ComponentVisibilityEditorView(self.browser, html_block.locator).select_option(content_group)
set_visibility(1, self.content_group_a)
set_visibility(2, self.content_group_b)
set_visibility(3, self.content_group_a, self.content_group_b)
set_visibility(4, 'All Students and Staff') # Does not work without this
container_page.publish_action.click()
def create_cohorts_and_assign_students(self):
"""
Adds 2 manual cohorts, linked to content groups, to the course.
Each cohort is assigned one student.
"""
instructor_dashboard_page = InstructorDashboardPage(self.browser, self.course_id)
instructor_dashboard_page.visit()
cohort_management_page = instructor_dashboard_page.select_cohort_management()
def add_cohort_with_student(cohort_name, content_group, student):
"""
Create cohort and assign student to it.
"""
cohort_management_page.add_cohort(cohort_name, content_group=content_group)
# After adding the cohort, it should automatically be selected
EmptyPromise(
lambda: cohort_name == cohort_management_page.get_selected_cohort(), "Waiting for new cohort"
).fulfill()
cohort_management_page.add_students_to_selected_cohort([student])
add_cohort_with_student("Cohort A", self.content_group_a, self.cohort_a_student_username)
add_cohort_with_student("Cohort B", self.content_group_b, self.cohort_b_student_username)
cohort_management_page.wait_for_ajax()
def test_page_existence(self):
"""
Make sure that the page is accessible.
"""
self._auto_auth(self.cohort_default_student_username, self.cohort_default_student_email, False)
self.courseware_search_page.visit()
def test_cohorted_search_user_a_a_content(self):
"""
Test user can search content restricted to his cohort.
"""
self._auto_auth(self.cohort_a_student_username, self.cohort_a_student_email, False)
self.courseware_search_page.visit()
self.courseware_search_page.search_for_term(self.group_a_html)
assert self.group_a_html in self.courseware_search_page.search_results.html[0]
def test_cohorted_search_user_b_a_content(self):
"""
Test user cannot search content restricted to a different cohort.
"""
self._auto_auth(self.cohort_b_student_username, self.cohort_b_student_email, False)
self.courseware_search_page.visit()
self.courseware_search_page.search_for_term(self.group_a_html)
assert self.group_a_html not in self.courseware_search_page.search_results.html[0]
def test_cohorted_search_user_default_ab_content(self):
"""
Test that a user not enrolled in any cohort can't see any restricted content.
"""
self._auto_auth(self.cohort_default_student_username, self.cohort_default_student_email, False)
self.courseware_search_page.visit()
self.courseware_search_page.search_for_term(self.group_a_and_b_html)
assert self.group_a_and_b_html not in self.courseware_search_page.search_results.html[0]
def test_cohorted_search_user_default_all_content(self):
"""
Test user can search public content when cohorts are used on the course.
"""
self._auto_auth(self.cohort_default_student_username, self.cohort_default_student_email, False)
self.courseware_search_page.visit()
self.courseware_search_page.search_for_term(self.visible_to_all_html)
assert self.visible_to_all_html in self.courseware_search_page.search_results.html[0]
def test_cohorted_search_user_staff_all_content(self):
"""
Test staff user can search all content, including cohort-restricted content, when cohorts are used on the course.
"""
self._auto_auth(self.staff_user["username"], self.staff_user["email"], False)
self._goto_staff_page().set_staff_view_mode('Staff')
self.courseware_search_page.search_for_term(self.visible_to_all_html)
assert self.visible_to_all_html in self.courseware_search_page.search_results.html[0]
self.courseware_search_page.clear_search()
self.courseware_search_page.search_for_term(self.group_a_and_b_html)
assert self.group_a_and_b_html in self.courseware_search_page.search_results.html[0]
self.courseware_search_page.clear_search()
self.courseware_search_page.search_for_term(self.group_a_html)
assert self.group_a_html in self.courseware_search_page.search_results.html[0]
self.courseware_search_page.clear_search()
self.courseware_search_page.search_for_term(self.group_b_html)
assert self.group_b_html in self.courseware_search_page.search_results.html[0]
def test_cohorted_search_user_staff_masquerade_student_content(self):
"""
Test staff user can search just student public content if selected from preview menu.
"""
self._auto_auth(self.staff_user["username"], self.staff_user["email"], False)
self._goto_staff_page().set_staff_view_mode('Student')
self.courseware_search_page.search_for_term(self.visible_to_all_html)
assert self.visible_to_all_html in self.courseware_search_page.search_results.html[0]
self.courseware_search_page.clear_search()
self.courseware_search_page.search_for_term(self.group_a_and_b_html)
assert self.group_a_and_b_html not in self.courseware_search_page.search_results.html[0]
self.courseware_search_page.clear_search()
self.courseware_search_page.search_for_term(self.group_a_html)
assert self.group_a_html not in self.courseware_search_page.search_results.html[0]
self.courseware_search_page.clear_search()
self.courseware_search_page.search_for_term(self.group_b_html)
assert self.group_b_html not in self.courseware_search_page.search_results.html[0]
def test_cohorted_search_user_staff_masquerade_cohort_content(self):
"""
Test staff user can search cohort and public content if selected from preview menu.
"""
self._auto_auth(self.staff_user["username"], self.staff_user["email"], False)
self._goto_staff_page().set_staff_view_mode('Student in ' + self.content_group_a)
self.courseware_search_page.search_for_term(self.visible_to_all_html)
assert self.visible_to_all_html in self.courseware_search_page.search_results.html[0]
self.courseware_search_page.clear_search()
self.courseware_search_page.search_for_term(self.group_a_and_b_html)
assert self.group_a_and_b_html in self.courseware_search_page.search_results.html[0]
self.courseware_search_page.clear_search()
self.courseware_search_page.search_for_term(self.group_a_html)
assert self.group_a_html in self.courseware_search_page.search_results.html[0]
self.courseware_search_page.clear_search()
self.courseware_search_page.search_for_term(self.group_b_html)
assert self.group_b_html not in self.courseware_search_page.search_results.html[0]
| agpl-3.0 |
eric-stanley/robotframework | src/robot/libraries/DateTime.py | 2 | 28623 | # Copyright 2008-2014 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A test library for handling date and time values.
_DateTime_ is a Robot Framework standard library that supports creating and
converting date and time values (e.g. `Get Current Date`, `Convert Time`),
as well as doing simple calculations with them (e.g. `Subtract Time From Date`,
`Add Time To Time`). It supports dates and times in various formats, and can
also be used by other libraries programmatically.
This library is new in Robot Framework 2.8.5.
= Table of Contents =
- `Terminology`
- `Date formats`
- `Time formats`
- `Millisecond handling`
- `Programmatic usage`
- `Shortcuts`
- `Keywords`
= Terminology =
In the context of this library, _date_ and _time_ generally have following
meanings:
- _date_: An entity with both date and time components but without any
timezone information. For example, '2014-06-11 10:07:42'.
- _time_: A time interval. For example, '1 hour 20 minutes' or '01:20:00'.
This terminology differs from what Python's standard
[https://docs.python.org/2/library/datetime.html|datetime] module uses.
Basically its
[https://docs.python.org/2/library/datetime.html#datetime-objects|datetime] and
[https://docs.python.org/2/library/datetime.html#timedelta-objects|timedelta]
objects match _date_ and _time_ as defined by this library.
= Date formats =
Dates can be given to and received from keywords in `timestamp`, `custom
timestamp`, `Python datetime` and `epoch time` formats. These formats are
discussed thoroughly in subsequent sections.
Input format is determined automatically based on the given date except when
using custom timestamps, in which case it needs to be given using
`date_format` argument. Default result format is timestamp, but it can
be overridden using `result_format` argument.
== Timestamp ==
If a date is given as a string, it is always considered to be a timestamp.
If no custom formatting is given using `date_format` argument, the timestamp
is expected to be in [http://en.wikipedia.org/wiki/ISO_8601|ISO 8601] like
format 'YYYY-MM-DD hh:mm:ss.mil', where any non-digit character can be used
as a separator, or separators can be omitted altogether. Additionally,
only the date part is mandatory, all possibly missing time components are
considered to be zeros.
Dates can also be returned in the same 'YYYY-MM-DD hh:mm:ss.mil' format by using
_timestamp_ value with `result_format` argument. This is also the default
format that keywords returning dates use. Milliseconds can be excluded using
`exclude_millis` as explained in `Millisecond handling` section.
Examples:
| ${date1} = | Convert Date | 2014-06-11 10:07:42.000 |
| ${date2} = | Convert Date | 20140611 100742 | result_format=timestamp |
| Should Be Equal | ${date1} | ${date2} |
| ${date} = | Convert Date | 20140612 12:57 | exclude_millis=yes |
| Should Be Equal | ${date} | 2014-06-12 12:57:00 |
== Custom timestamp ==
It is possible to use custom timestamps in both input and output.
The custom format is same as accepted by Python's
[https://docs.python.org/2/library/datetime.html#strftime-strptime-behavior|
datetime.strptime() function]. For example, the default timestamp discussed
in the previous section would match '%Y-%m-%d %H:%M:%S.%f'.
When using a custom timestamp in input, it must be specified using `date_format`
argument. The actual input value must be a string that matches the specified
format exactly. When using a custom timestamp in output, it must be given
using `result_format` argument.
Examples:
| ${date} = | Convert Date | 28.05.2014 12:05 | date_format=%d.%m.%Y %H:%M |
| Should Be Equal | ${date} | 2014-05-28 12:05:00.000 |
| ${date} = | Convert Date | ${date} | result_format=%d.%m.%Y |
| Should Be Equal | ${date} | 28.05.2014 |
== Python datetime ==
Python's standard
[https://docs.python.org/2/library/datetime.html#datetime.datetime|datetime]
objects can be used both in input and output. In input they are recognized
automatically, and in output it is possible to get them by giving _datetime_
value to `result_format` argument.
One nice benefit with datetime objects is that they have different time
components available as attributes that can be easily accessed using the
extended variable syntax.
Examples:
| ${datetime} = | Convert Date | 2014-06-11 10:07:42.123 | datetime |
| Should Be Equal As Integers | ${datetime.year} | 2014 |
| Should Be Equal As Integers | ${datetime.month} | 6 |
| Should Be Equal As Integers | ${datetime.day} | 11 |
| Should Be Equal As Integers | ${datetime.hour} | 10 |
| Should Be Equal As Integers | ${datetime.minute} | 7 |
| Should Be Equal As Integers | ${datetime.second} | 42 |
| Should Be Equal As Integers | ${datetime.microsecond} | 123000 |
== Epoch time ==
Epoch time is the time in seconds since the
[http://en.wikipedia.org/wiki/Unix_time|UNIX epoch] i.e. 00:00:00.000 (UTC)
1 January 1970. To give a date in epoch time, it must be given as a number
(integer or float), not as a string. To return a date in epoch time,
it is possible to use _epoch_ value with `result_format` argument.
Epoch time is returned as a floating point number.
Notice that epoch time itself is independent on timezones and thus same
around the world at a certain time. What local time a certain epoch time
matches obviously then depends on the timezone. For example, examples below
were tested in Finland but verifications would fail on other timezones.
Examples:
| ${date} = | Convert Date | ${1000000000} |
| Should Be Equal | ${date} | 2001-09-09 04:46:40.000 |
| ${date} = | Convert Date | 2014-06-12 13:27:59.279 | epoch |
| Should Be Equal | ${date} | ${1402568879.279} |
= Time formats =
Similarly as dates, times can be given to and received from keywords in
various different formats. Supported formats are `number`, `time string`
(verbose and compact), `timer string` and `Python timedelta`.
Input format for time is always determined automatically based on the input.
Result format is number by default, but it can be customised using
`result_format` argument.
== Number ==
Time given as a number is interpreted to be seconds. It can be given
either as an integer or a float, or it can be a string that can be converted
to a number.
To return a time as a number, `result_format` argument must be _number_,
which is also the default. Returned number is always a float.
Examples:
| ${time} = | Convert Time | 3.14 |
| Should Be Equal | ${time} | ${3.14} |
| ${time} = | Convert Time | ${time} | result_format=number |
| Should Be Equal | ${time} | ${3.14} |
== Time string ==
Time strings are strings in format like '1 minutes 42 seconds' or '1min 42s'.
The basic idea of this format is having first a number and then a text
specifying what time that number represents. Numbers can be either
integers or floating point numbers, the whole format is case and space
insensitive, and it is possible to add a minus prefix to specify negative
times. The available time specifiers are:
- days, day, d
- hours, hour, h
- minutes, minute, mins, min, m
- seconds, second, secs, sec, s
- milliseconds, millisecond, millis, ms
When returning a time string, it is possible to select between _verbose_
and _compact_ representations using `result_format` argument. The verbose
format uses long specifiers 'day', 'hour', 'minute', 'second' and
'millisecond', and adds 's' at the end when needed. The compact format uses
shorter specifiers 'd', 'h', 'min', 's' and 'ms', and even drops a space
between the number and the specifier.
Examples:
| ${time} = | Convert Time | 1 minute 42 seconds |
| Should Be Equal | ${time} | ${102} |
| ${time} = | Convert Time | 4200 | verbose |
| Should Be Equal | ${time} | 1 hour 10 minutes |
| ${time} = | Convert Time | - 1.5 hours | compact |
| Should Be Equal | ${time} | - 1h 30min |
== Timer string ==
Timer string is a string given in timer like format 'hh:mm:ss.mil'. In this
format both hour and millisecond parts are optional, leading and trailing
zeros can be left out when they are not meaningful, and negative times can
be represented by adding a minus prefix.
To return a time as timer string, `result_format` argument must be given
value _timer_. Timer strings are by default returned in full _hh:mm:ss.mil_
format, but milliseconds can be excluded using `exclude_millis` as explained
in `Millisecond handling` section.
Examples:
| ${time} = | Convert Time | 01:42 |
| Should Be Equal | ${time} | ${102} |
| ${time} = | Convert Time | 01:10:00.123 |
| Should Be Equal | ${time} | ${4200.123} |
| ${time} = | Convert Time | 102 | timer |
| Should Be Equal | ${time} | 00:01:42.000 |
| ${time} = | Convert Time | -101.567 | timer | exclude_millis=yes |
| Should Be Equal | ${time} | -00:01:42 |
== Python timedelta ==
Python's standard
[https://docs.python.org/2/library/datetime.html#datetime.timedelta|timedelta]
objects are also supported both in input and in output. In input they are
recognized automatically, and in output it is possible to receive them by
giving _timedelta_ value to `result_format` argument.
Examples:
| ${timedelta} = | Convert Time | 01:10:02.123 | timedelta |
| Should Be Equal | ${timedelta.total_seconds()} | ${4202.123} |
= Millisecond handling =
This library handles dates and times internally using the precision of the
given input. With `timestamp`, `time string`, and `timer string` result
formats seconds are, however, rounded to millisecond accuracy. Milliseconds
may also be included even if there would be none.
All keywords returning dates or times have an option to leave milliseconds
out by giving any value considered true (e.g. any non-empty string) to
`exclude_millis` argument. When this option is used, seconds in returned
dates and times are rounded to the nearest full second. With `timestamp`
and `timer string` result formats, milliseconds will also be removed from
the returned string altogether.
Examples:
| ${date} = | Convert Date | 2014-06-11 10:07:42 |
| Should Be Equal | ${date} | 2014-06-11 10:07:42.000 |
| ${date} = | Convert Date | 2014-06-11 10:07:42.500 | exclude_millis=yes |
| Should Be Equal | ${date} | 2014-06-11 10:07:43 |
| ${dt} = | Convert Date | 2014-06-11 10:07:42.500 | datetime | exclude_millis=yes |
| Should Be Equal | ${dt.second} | ${43} |
| Should Be Equal | ${dt.microsecond} | ${0} |
| ${time} = | Convert Time | 102 | timer |
| Should Be Equal | ${time} | 00:01:42.000 | |
| ${time} = | Convert Time | 102.567 | timer | exclude_millis=true |
| Should Be Equal | ${time} | 00:01:43 | |
= Programmatic usage =
In addition to be used as normal library, this library is intended to
provide a stable API for other libraries to use if they want to support
same date and time formats as this library. All the provided keywords
are available as functions that can be easily imported:
| from robot.libraries.DateTime import convert_time
|
| def example_keyword(timeout):
| seconds = convert_time(timeout)
| # ...
Additionally helper classes _Date_ and _Time_ can be used directly:
| from robot.libraries.DateTime import Date, Time
|
| def example_keyword(date, interval):
| date = Date(date).convert('datetime')
| interval = Time(interval).convert('number')
| # ...
"""
from datetime import datetime, timedelta
import time
import sys
import re
from robot.version import get_version
from robot.utils import elapsed_time_to_string, secs_to_timestr, timestr_to_secs
__version__ = get_version()
__all__ = ['convert_time', 'convert_date', 'subtract_date_from_date',
'subtract_time_from_date', 'subtract_time_from_time',
'add_time_to_time', 'add_time_to_date', 'get_current_date']
def get_current_date(time_zone='local', increment=0,
result_format='timestamp', exclude_millis=False):
"""Returns current local or UTC time with an optional increment.
Arguments:
- _time_zone:_ Get the current time on this time zone. Currently only
'local' (default) and 'UTC' are supported.
- _increment:_ Optional time increment to add to the returned date in
one of the supported `time formats`. Can be negative.
- _result_format:_ Format of the returned date (see `date formats`).
- _exclude_millis:_ When set to any true value, rounds and drops
milliseconds as explained in `millisecond handling`.
Examples:
| ${date} = | Get Current Date |
| Should Be Equal | ${date} | 2014-06-12 20:00:58.946 |
| ${date} = | Get Current Date | UTC |
| Should Be Equal | ${date} | 2014-06-12 17:00:58.946 |
| ${date} = | Get Current Date | increment=02:30:00 |
| Should Be Equal | ${date} | 2014-06-12 22:30:58.946 |
| ${date} = | Get Current Date | UTC | - 5 hours |
| Should Be Equal | ${date} | 2014-06-12 12:00:58.946 |
| ${date} = | Get Current Date | result_format=datetime |
| Should Be Equal | ${date.year} | ${2014} |
| Should Be Equal | ${date.month} | ${6} |
"""
if time_zone.upper() == 'LOCAL':
dt = datetime.now()
elif time_zone.upper() == 'UTC':
dt = datetime.utcnow()
else:
raise ValueError("Unsupported timezone '%s'." % time_zone)
date = Date(dt) + Time(increment)
return date.convert(result_format, millis=not exclude_millis)
def convert_date(date, result_format='timestamp', exclude_millis=False,
date_format=None):
"""Converts between supported `date formats`.
Arguments:
- _date:_ Date in one of the supported `date formats`.
- _result_format:_ Format of the returned date.
- _exclude_millis:_ When set to any true value, rounds and drops
milliseconds as explained in `millisecond handling`.
- _date_format:_ Specifies possible `custom timestamp` format.
Examples:
| ${date} = | Convert Date | 20140528 12:05:03.111 |
| Should Be Equal | ${date} | 2014-05-28 12:05:03.111 |
| ${date} = | Convert Date | ${date} | epoch |
| Should Be Equal | ${date} | ${1401267903.111} |
| ${date} = | Convert Date | 5.28.2014 12:05 | exclude_millis=yes | date_format=%m.%d.%Y %H:%M |
| Should Be Equal | ${date} | 2014-05-28 12:05:00 |
"""
return Date(date, date_format).convert(result_format,
millis=not exclude_millis)
def convert_time(time, result_format='number', exclude_millis=False):
"""Converts between supported `time formats`.
Arguments:
- _time:_ Time in one of the supported `time formats`.
- _result_format:_ Format of the returned time.
- _exclude_millis:_ When set to any true value, rounds and drops
milliseconds as explained in `millisecond handling`.
Examples:
| ${time} = | Convert Time | 10 seconds |
| Should Be Equal | ${time} | ${10} |
| ${time} = | Convert Time | 1:00:01 | verbose |
| Should Be Equal | ${time} | 1 hour 1 second |
| ${time} = | Convert Time | ${3661.5} | timer | exclude_millis=yes |
| Should Be Equal | ${time} | 01:01:02 |
"""
return Time(time).convert(result_format, millis=not exclude_millis)
def subtract_date_from_date(date1, date2, result_format='number',
exclude_millis=False, date1_format=None,
date2_format=None):
"""Subtracts date from another date and returns time between.
Arguments:
- _date1:_ Date to subtract another date from in one of the
supported `date formats`.
- _date2:_ Date that is subtracted in one of the supported
`date formats`.
- _result_format:_ Format of the returned time (see `time formats`).
- _exclude_millis:_ When set to any true value, rounds and drops
milliseconds as explained in `millisecond handling`.
- _date1_format:_ Specifies possible `custom timestamp` format of _date1_.
- _date2_format:_ Specifies possible `custom timestamp` format of _date2_.
Examples:
| ${time} = | Subtract Date From Date | 2014-05-28 12:05:52 | 2014-05-28 12:05:10 |
| Should Be Equal | ${time} | ${42} |
| ${time} = | Subtract Date From Date | 2014-05-28 12:05:52 | 2014-05-27 12:05:10 | verbose |
| Should Be Equal | ${time} | 1 day 42 seconds |
"""
time = Date(date1, date1_format) - Date(date2, date2_format)
return time.convert(result_format, millis=not exclude_millis)
def add_time_to_date(date, time, result_format='timestamp',
exclude_millis=False, date_format=None):
"""Adds time to date and returns the resulting date.
Arguments:
- _date:_ Date to add time to in one of the supported
`date formats`.
- _time:_ Time that is added in one of the supported
`time formats`.
- _result_format:_ Format of the returned date.
- _exclude_millis:_ When set to any true value, rounds and drops
milliseconds as explained in `millisecond handling`.
- _date_format:_ Specifies possible `custom timestamp` format of _date_.
Examples:
| ${date} = | Add Time To Date | 2014-05-28 12:05:03.111 | 7 days |
| Should Be Equal | ${date} | 2014-06-04 12:05:03.111 | |
| ${date} = | Add Time To Date | 2014-05-28 12:05:03.111 | 01:02:03.004 |
| Should Be Equal | ${date} | 2014-05-28 13:07:06.115 |
"""
date = Date(date, date_format) + Time(time)
return date.convert(result_format, millis=not exclude_millis)
def subtract_time_from_date(date, time, result_format='timestamp',
exclude_millis=False, date_format=None):
"""Subtracts time from date and returns the resulting date.
Arguments:
- _date:_ Date to subtract time from in one of the supported
`date formats`.
- _time:_ Time that is subtracted in one of the supported
`time formats`.
- _result_format:_ Format of the returned date.
- _exclude_millis:_ When set to any true value, rounds and drops
milliseconds as explained in `millisecond handling`.
- _date_format:_ Specifies possible `custom timestamp` format of _date_.
Examples:
| ${date} = | Subtract Time From Date | 2014-06-04 12:05:03.111 | 7 days |
| Should Be Equal | ${date} | 2014-05-28 12:05:03.111 |
| ${date} = | Subtract Time From Date | 2014-05-28 13:07:06.115 | 01:02:03.004 |
| Should Be Equal | ${date} | 2014-05-28 12:05:03.111 |
"""
date = Date(date, date_format) - Time(time)
return date.convert(result_format, millis=not exclude_millis)
def add_time_to_time(time1, time2, result_format='number',
exclude_millis=False):
"""Adds time to another time and returns the resulting time.
Arguments:
- _time1:_ First time in one of the supported `time formats`.
- _time2:_ Second time in one of the supported `time formats`.
- _result_format:_ Format of the returned time.
- _exclude_millis:_ When set to any true value, rounds and drops
milliseconds as explained in `millisecond handling`.
Examples:
| ${time} = | Add Time To Time | 1 minute | 42 |
| Should Be Equal | ${time} | ${102} |
| ${time} = | Add Time To Time | 3 hours 5 minutes | 01:02:03 | timer | exclude_millis=yes |
| Should Be Equal | ${time} | 04:07:03 |
"""
time = Time(time1) + Time(time2)
return time.convert(result_format, millis=not exclude_millis)
def subtract_time_from_time(time1, time2, result_format='number',
exclude_millis=False):
"""Subtracts time from another time and returns the resulting time.
Arguments:
- _time1:_ Time to subtract another time from in one of
the supported `time formats`.
- _time2:_ Time to subtract in one of the supported `time formats`.
- _result_format:_ Format of the returned time.
- _exclude_millis:_ When set to any true value, rounds and drops
milliseconds as explained in `millisecond handling`.
Examples:
| ${time} = | Subtract Time From Time | 00:02:30 | 100 |
| Should Be Equal | ${time} | ${50} |
| ${time} = | Subtract Time From Time | ${time} | 1 minute | compact |
| Should Be Equal | ${time} | - 10s |
"""
time = Time(time1) - Time(time2)
return time.convert(result_format, millis=not exclude_millis)
class Date(object):
def __init__(self, date, input_format=None):
self.seconds = self._convert_date_to_seconds(date, input_format)
def _convert_date_to_seconds(self, date, input_format):
if isinstance(date, basestring):
return self._string_to_epoch(date, input_format)
elif isinstance(date, datetime):
return self._mktime_with_millis(date)
elif isinstance(date, (int, long, float)):
return float(date)
raise ValueError("Unsupported input '%s'." % date)
def _string_to_epoch(self, ts, input_format):
if not input_format:
ts = self._normalize_timestamp(ts)
input_format = '%Y-%m-%d %H:%M:%S.%f'
if self._need_to_handle_f_directive(input_format):
return self._handle_un_supported_f_directive(ts, input_format)
return self._mktime_with_millis(datetime.strptime(ts, input_format))
def _need_to_handle_f_directive(self, format):
if '%f' not in format:
return False
if sys.version_info < (2, 6):
return True
# https://ironpython.codeplex.com/workitem/34706
# http://bugs.jython.org/issue2166
return sys.platform == 'cli' or sys.platform.startswith('java')
def _normalize_timestamp(self, date):
ts = ''.join(d for d in date if d.isdigit())
if len(ts) < 8:
raise ValueError("Invalid timestamp '%s'." % date)
ts = ts.ljust(20, '0')
return '%s-%s-%s %s:%s:%s.%s' % (ts[:4], ts[4:6], ts[6:8], ts[8:10],
ts[10:12], ts[12:14], ts[14:])
def _handle_un_supported_f_directive(self, ts, input_format):
input_format = self._remove_f_from_format(input_format)
micro = re.search('\d+$', ts).group(0)
ts = ts[:-len(micro)]
epoch = time.mktime(time.strptime(ts, input_format))
epoch += float(micro) / 10**len(micro)
return epoch
def _remove_f_from_format(self, format):
if not format.endswith('%f'):
raise ValueError('%f directive is supported only at the end of '
'the format string on this Python interpreter.')
return format[:-2]
def _mktime_with_millis(self, dt):
return time.mktime(dt.timetuple()) + dt.microsecond / 10.0**6
def convert(self, format, millis=True):
seconds = self.seconds if millis else round(self.seconds)
if '%' in format:
return self._convert_to_custom_timestamp(seconds, format)
try:
result_converter = getattr(self, '_convert_to_%s' % format.lower())
except AttributeError:
raise ValueError("Unknown format '%s'." % format)
return result_converter(seconds, millis)
def _convert_to_custom_timestamp(self, seconds, format):
format = str(format) # Needed by Python 2.5
dt = self._datetime_from_seconds(seconds)
if not self._need_to_handle_f_directive(format):
return dt.strftime(format)
format = self._remove_f_from_format(format)
micro = round(seconds % 1 * 10**6)
return '%s%06d' % (dt.strftime(format), micro)
def _convert_to_timestamp(self, seconds, millis=True):
milliseconds = int(round(seconds % 1 * 1000))
if milliseconds == 1000:
seconds = round(seconds)
milliseconds = 0
dt = self._datetime_from_seconds(seconds)
ts = dt.strftime('%Y-%m-%d %H:%M:%S')
if millis:
ts += '.%03d' % milliseconds
return ts
def _datetime_from_seconds(self, ts):
# Jython and IronPython handle floats incorrectly. For example:
# datetime.fromtimestamp(1399410716.123).microsecond == 122999
dt = datetime.fromtimestamp(ts)
return dt.replace(microsecond=int(round(ts % 1 * 10**6)))
def _convert_to_epoch(self, seconds, millis=True):
return seconds
def _convert_to_datetime(self, seconds, millis=True):
return self._datetime_from_seconds(seconds)
def __add__(self, other):
if isinstance(other, Time):
return Date(self.seconds + other.seconds)
raise TypeError('Can only add Time to Date, not %s.'
% type(other).__name__)
def __sub__(self, other):
if isinstance(other, Date):
return Time(self.seconds - other.seconds)
if isinstance(other, Time):
return Date(self.seconds - other.seconds)
raise TypeError('Can only subtract Date or Time from Date, not %s.'
% type(other).__name__)
class Time(object):
def __init__(self, time):
self.seconds = self._convert_time_to_seconds(time)
def _convert_time_to_seconds(self, time):
if isinstance(time, timedelta):
# timedelta.total_seconds() is new in Python 2.7
return (time.days * 24 * 60 * 60 + time.seconds +
time.microseconds / 1000000.0)
return timestr_to_secs(time, round_to=None)
def convert(self, format, millis=True):
try:
result_converter = getattr(self, '_convert_to_%s' % format.lower())
except AttributeError:
raise ValueError("Unknown format '%s'." % format)
seconds = self.seconds if millis else round(self.seconds)
return result_converter(seconds, millis)
def _convert_to_number(self, seconds, millis=True):
return seconds
def _convert_to_verbose(self, seconds, millis=True):
return secs_to_timestr(seconds)
def _convert_to_compact(self, seconds, millis=True):
return secs_to_timestr(seconds, compact=True)
def _convert_to_timer(self, seconds, millis=True):
return elapsed_time_to_string(seconds * 1000, include_millis=millis)
def _convert_to_timedelta(self, seconds, millis=True):
return timedelta(seconds=seconds)
def __add__(self, other):
if isinstance(other, Time):
return Time(self.seconds + other.seconds)
raise TypeError('Can only add Time to Time, not %s.'
% type(other).__name__)
def __sub__(self, other):
if isinstance(other, Time):
return Time(self.seconds - other.seconds)
raise TypeError('Can only subtract Time from Time, not %s.'
% type(other).__name__)
| apache-2.0 |
kobejean/tensorflow | tensorflow/contrib/distribute/python/tpu_strategy.py | 1 | 20404 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TPU Distribution Strategy.
This is experimental. It's not ready for general use.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.distribute.python import cross_tower_ops as cross_tower_ops_lib
from tensorflow.contrib.distribute.python import one_device_strategy
from tensorflow.contrib.distribute.python import values
from tensorflow.contrib.tpu.python.ops import tpu_ops
from tensorflow.contrib.tpu.python.tpu import tpu
from tensorflow.contrib.tpu.python.tpu import tpu_system_metadata as tpu_system_metadata_lib
from tensorflow.contrib.tpu.python.tpu import training_loop
from tensorflow.python.eager import context
from tensorflow.python.eager import tape
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.training import device_util
from tensorflow.python.training import distribute as distribute_lib
from tensorflow.python.util import nest
_TPU_INITIALIZE_SYSTEM_COLLECTION = "TPU_STRATEGY_INITIALIZE"
def get_tpu_system_metadata(tpu_cluster_resolver):
"""Retrieves TPU system metadata given a TPUClusterResolver."""
master = tpu_cluster_resolver.master()
# pylint: disable=protected-access
cluster_spec = tpu_cluster_resolver.cluster_spec()
cluster_def = cluster_spec.as_cluster_def() if cluster_spec else None
tpu_system_metadata = (
tpu_system_metadata_lib._query_tpu_system_metadata(
master,
cluster_def=cluster_def,
query_topology=False))
return tpu_system_metadata
# TODO(jhseu): Deduplicate with MirroredStrategy?
def _create_tpu_mirrored_variable(devices, real_mirrored_creator, *args,
**kwargs): # pylint: disable=g-missing-docstring
# Figure out what collections this variable should be added to.
# We'll add the TPUMirroredVariable to those collections instead.
collections = kwargs.pop("collections", None)
if collections is None:
collections = [ops.GraphKeys.GLOBAL_VARIABLES]
kwargs["collections"] = []
# TODO(jhseu): Should we have different behavior for different
# synchronization settings?
# Get aggregation value
# TODO(jhseu): Support aggregation in a tower context.
aggregation = kwargs.pop("aggregation", vs.VariableAggregation.NONE)
if aggregation not in [
vs.VariableAggregation.NONE,
vs.VariableAggregation.SUM,
vs.VariableAggregation.MEAN,
vs.VariableAggregation.ONLY_FIRST_TOWER,
]:
raise ValueError("Invalid variable aggregation mode: {} for variable: {}"
.format(aggregation, kwargs["name"]))
# Ignore user-specified caching device, not needed for mirrored variables.
kwargs.pop("caching_device", None)
# TODO(josh11b,apassos): It would be better if variable initialization
# was never recorded on the tape instead of having to do this manually
# here.
with tape.stop_recording():
index = real_mirrored_creator(devices, *args, **kwargs)
result = values.TPUMirroredVariable(index, index[devices[0]], aggregation)
if not context.executing_eagerly():
g = ops.get_default_graph()
# If "trainable" is True, next_creator() will add the member variables
# to the TRAINABLE_VARIABLES collection, so we manually remove
# them and replace with the MirroredVariable. We can't set
# "trainable" to False for next_creator() since that causes functions
# like implicit_gradients to skip those variables.
if kwargs.get("trainable", True):
collections.append(ops.GraphKeys.TRAINABLE_VARIABLES)
l = g.get_collection_ref(ops.GraphKeys.TRAINABLE_VARIABLES)
for v in index.values():
l.remove(v)
g.add_to_collections(collections, result)
return result
# TODO(jhseu): Stop inheriting from OneDeviceStrategy.
class TPUStrategy(one_device_strategy.OneDeviceStrategy):
"""Experimental TPU distribution strategy implementation."""
def __init__(self, tpu_cluster_resolver, steps_per_run, num_cores=None):
"""Initializes the TPUStrategy object.
Args:
tpu_cluster_resolver: A tf.contrib.cluster_resolver.TPUClusterResolver,
which provides information about the TPU cluster.
steps_per_run: Number of steps to run on device before returning to the
host. Note that this can have side-effects on performance, hooks,
metrics, summaries etc.
This parameter is only used when Distribution Strategy is used with
estimator or keras.
      num_cores: Number of cores to use on the TPU. If None is specified, then
auto-detect the cores and topology of the TPU system.
"""
# TODO(sourabhbajaj): OneDeviceStrategy should be initialized with the
# master node fetched from the cluster resolver.
super(TPUStrategy, self).__init__('/device:CPU:0')
self._tpu_cluster_resolver = tpu_cluster_resolver
self._tpu_metadata = get_tpu_system_metadata(self._tpu_cluster_resolver)
# TODO(sourabhbajaj): Change this from num_cores to metadata_override
self._num_cores_override = num_cores
# TODO(jhseu): Switch to DeviceAssignment to support pods and model
# parallelism.
device_map = {d.name: i for i, d in enumerate(self._tpu_metadata.devices)
if "device:TPU:" in d.name}
self._device_index = values.PerDevice(device_map)
self._tpu_devices = sorted(device_map.keys())
# Only create variables for the number of towers we're running.
self._tpu_devices = self._tpu_devices[:self.num_towers]
# TODO(sourabhbajaj): Remove this once performance of running one step
# at a time is comparable to multiple steps.
self.steps_per_run = steps_per_run
def _get_enqueue_op_per_host(self, host_id, iterator, input_shapes,
iterations):
"""Create an enqueue op for a single host identified using host_id.
The while_loop op returned will run `iterations` times and in each run
enqueue batches for each shard.
Args:
host_id: integer, id of the host to run the enqueue ops on.
iterator: `tf.data` iterator to read the input data.
      input_shapes: shape of inputs to be enqueued on the queue. This is the same as
the value of `nest.flatten(iterator.output_shapes)`.
iterations: integer, number of iterations to be run; determines the
number of batches to be enqueued.
Returns:
while_loop_op running `iterations` times; in each run we enqueue a batch
on the infeed queue from the host with id `host_id` for each device shard.
"""
host = self.get_host_cpu_device(host_id)
def _infeed_enqueue_ops_fn():
"""Enqueue ops for one iteration."""
control_deps = []
sharded_inputs = []
enqueue_ops = []
with ops.device(host):
for _ in range(self.num_towers_per_host):
# Use control dependencies to ensure a deterministic ordering.
with ops.control_dependencies(control_deps):
inputs = nest.flatten(iterator.get_next())
control_deps.extend(inputs)
sharded_inputs.append(inputs)
for core_id, shard_input in enumerate(sharded_inputs):
enqueue_ops.append(
tpu_ops.infeed_enqueue_tuple(
inputs=shard_input,
shapes=input_shapes,
device_ordinal=core_id))
return enqueue_ops
def enqueue_ops_loop_body(i):
"""Callable for the loop body of the while_loop instantiated below."""
with ops.control_dependencies(_infeed_enqueue_ops_fn()):
return i + 1
with ops.device(host):
enqueue_op_per_host = control_flow_ops.while_loop(
lambda i: i < iterations,
enqueue_ops_loop_body,
[constant_op.constant(0)],
parallel_iterations=1)
return enqueue_op_per_host
def distribute_dataset(self, dataset_fn):
# TODO(priyag): Perhaps distribute across cores here.
return self._call_dataset_fn(dataset_fn)
# TODO(priyag): Deal with OutOfRange errors once b/111349762 is fixed.
# TODO(sourabhbajaj): Remove the initial_loop_values parameter when we have
# a mechanism to infer the outputs of `fn`. Pending b/110550782.
def _run_steps_on_dataset(self, fn, iterator, iterations,
initial_loop_values=None):
shapes = nest.flatten(iterator.output_shapes)
if any([not s.is_fully_defined() for s in shapes]):
raise ValueError(
'TPU currently requires fully defined shapes. Either use '
'set_shape() on the input tensors or use '
'dataset.batch(..., drop_remainder=True).')
types = nest.flatten(iterator.output_types)
enqueue_ops = [
self._get_enqueue_op_per_host(host_id, iterator, shapes, iterations)
for host_id in range(self.num_hosts)]
def dequeue_fn():
dequeued = tpu_ops.infeed_dequeue_tuple(dtypes=types, shapes=shapes)
return nest.pack_sequence_as(iterator.output_shapes, dequeued)
# Wrap `fn` for repeat.
if initial_loop_values is None:
initial_loop_values = {}
initial_loop_values = nest.flatten(initial_loop_values)
ctx = values.MultiStepContext()
def run_fn(*args, **kwargs):
"""Single step on the TPU device."""
del args, kwargs
fn_inputs = dequeue_fn()
if not isinstance(fn_inputs, tuple):
fn_inputs = (fn_inputs,)
fn_result = fn(ctx, *fn_inputs)
flat_last_step_outputs = nest.flatten(ctx.last_step_outputs)
if flat_last_step_outputs:
with ops.control_dependencies([fn_result]):
return [array_ops.identity(f) for f in flat_last_step_outputs]
else:
return fn_result
# TODO(sourabhbajaj): The input to while loop should be based on the output
# type of the step_fn
def iterate_on_tpu():
return training_loop.repeat(iterations, run_fn, initial_loop_values)
# We capture the control_flow_context at this point, before we run `fn`
# inside a while_loop and TPU replicate context. This is useful in cases
# where we might need to exit these contexts and get back to the outer
# context to do some things, for e.g. create an op which should be
# evaluated only once at the end of the loop on the host. One such usage
# is in creating metrics' value op.
self._outer_control_flow_context = (
ops.get_default_graph()._get_control_flow_context()) # pylint: disable=protected-access
replicate_inputs = [[]] * self.num_towers
replicate_outputs = tpu.replicate(iterate_on_tpu, replicate_inputs)
del self._outer_control_flow_context
ctx.run_op = control_flow_ops.group(replicate_outputs, enqueue_ops)
    # Filter out any ops from the outputs; typically this would be the case
# when there were no tensor outputs.
last_step_tensor_outputs = [x for x in replicate_outputs
if not isinstance(x, ops.Operation)]
# Outputs are currently of the structure (grouped by device)
# [[output0_device0, output1_device0, output2_device0],
# [output0_device1, output1_device1, output2_device1]]
# Convert this to the following structure instead: (grouped by output)
# [[output0_device0, output0_device1],
# [output1_device0, output1_device1],
# [output2_device0, output2_device1]]
last_step_tensor_outputs = [list(x) for x in zip(*last_step_tensor_outputs)]
# Convert replicate_outputs to the original dict structure of
# last_step_outputs.
last_step_tensor_outputs_dict = nest.pack_sequence_as(
ctx.last_step_outputs, last_step_tensor_outputs)
for (name, aggregation) in ctx._last_step_outputs_aggregations.items(): # pylint: disable=protected-access
output = last_step_tensor_outputs_dict[name]
# For outputs that have already been aggregated, take the first value
# from the list as each value should be the same. Else return the full
# list of values.
if aggregation is not variables_lib.VariableAggregation.NONE:
# TODO(priyag): Should this return the element or a list with 1 element
last_step_tensor_outputs_dict[name] = output[0]
ctx._set_last_step_outputs(last_step_tensor_outputs_dict) # pylint: disable=protected-access
return ctx
def _call_for_each_tower(self, fn, *args, **kwargs):
# TODO(jhseu): Consider making it so call_for_each_tower implies that we're
# in a tpu.rewrite(), and update TPUMirroredVariable accordingly.
kwargs.pop('run_concurrently', None)
with one_device_strategy._OneDeviceTowerContext(self): # pylint: disable=protected-access
return fn(*args, **kwargs)
def initialize(self):
if context.executing_eagerly():
      # TODO(priyag): Add appropriate call here when eager is supported for TPUs.
raise NotImplementedError('Eager mode not supported in TPUStrategy.')
else:
# TODO(jhseu): We need this hack because DistributionStrategies must be
# pickleable for copy.deepcopy(). Remove when initialize_system goes away.
graph = ops.get_default_graph()
tpu_init = graph.get_collection(_TPU_INITIALIZE_SYSTEM_COLLECTION)
if tpu_init:
return tpu_init
graph.add_to_collection(_TPU_INITIALIZE_SYSTEM_COLLECTION,
tpu.initialize_system())
return graph.get_collection(_TPU_INITIALIZE_SYSTEM_COLLECTION)
def finalize(self):
if context.executing_eagerly():
      # TODO(priyag): Add appropriate call here when eager is supported for TPUs.
raise NotImplementedError('Eager mode not supported in TPUStrategy.')
else:
return [tpu.shutdown_system()]
def _get_devices_from(self, colocate_with=None):
# TODO(jhseu): Change this when we support model parallelism.
return self._tpu_devices
def _create_variable(self, next_creator, *args, **kwargs):
"""Create a TPUMirroredVariable. See `DistributionStrategy.scope`."""
colocate_with = kwargs.pop("colocate_with", None)
devices = self._get_devices_from(colocate_with)
def _real_mirrored_creator(devices, *args, **kwargs): # pylint: disable=g-missing-docstring
index = {}
for i, d in enumerate(devices):
with ops.device(d):
if i > 0:
# Give replicas meaningful distinct names:
var0name = index[devices[0]].name.split(":")[0]
# We append a / to variable names created on towers with id > 0 to
# ensure that we ignore the name scope and instead use the given
# name as the absolute name of the variable.
kwargs["name"] = "%s/replica_%d/" % (var0name, i)
# Initialize replicas with the same value:
if context.executing_eagerly():
kwargs["initial_value"] = array_ops.identity(
index[devices[0]].value())
else:
def initial_value_fn(device=d):
with ops.device(device):
return array_ops.identity(index[devices[0]].initial_value)
kwargs["initial_value"] = initial_value_fn
with context.context().device_policy(context.DEVICE_PLACEMENT_SILENT):
v = next_creator(*args, **kwargs)
assert not isinstance(v, values.TPUMirroredVariable)
index[d] = v
return index
return _create_tpu_mirrored_variable(devices, _real_mirrored_creator, *args,
**kwargs)
def _reduce(self, aggregation, value, destinations):
if values._enclosing_tpu_context() is not None: # pylint: disable=protected-access
if aggregation == vs.VariableAggregation.MEAN:
# TODO(jhseu): Revisit once we support model-parallelism.
value *= (1. / self.num_towers)
elif aggregation != vs.VariableAggregation.SUM:
raise NotImplementedError(
"Currently only support sum & mean in TPUStrategy.")
return tpu_ops.cross_replica_sum(value)
    # Validate that the destination is the same as the host device
# Note we don't do this when in replicate context as the reduction is
# performed on the TPU device itself.
devices = cross_tower_ops_lib.get_devices_from(destinations)
if len(devices) == 1:
assert device_util.canonicalize(devices[0]) == device_util.canonicalize(
self.get_host_cpu_device(0))
else:
raise ValueError('Multiple devices are not supported for TPUStrategy')
if aggregation == vs.VariableAggregation.ONLY_FIRST_TOWER:
return value[0]
output = math_ops.add_n(value)
if aggregation == vs.VariableAggregation.MEAN:
return output * (1. / len(value))
return output
def _update(self, var, fn, *args, **kwargs):
# TODO(jhseu): Consider supporting grouped==False.
assert isinstance(var, values.TPUMirroredVariable)
if values._enclosing_tpu_context() is not None: # pylint: disable=protected-access
return fn(var, *args, **kwargs)
# Otherwise, we revert to MirroredStrategy behavior and update each variable
# directly.
updates = {}
for d, v in var._index.items(): # pylint: disable=protected-access
name = "update_%d" % self._device_index.get(d)
with ops.device(d), distribute_lib.UpdateContext(d), ops.name_scope(name):
# If args and kwargs are not mirrored, the value is returned as is.
updates[d] = fn(v,
*values.select_device_mirrored(d, args),
**values.select_device_mirrored(d, kwargs))
# Make a single control dependency to keep the variables mirrored. If one
# assignment is fetched, then run all assignments.
sorted_keys = sorted(updates.keys())
update_tuple = control_flow_ops.tuple([updates[d] for d in sorted_keys])
for i, d in enumerate(sorted_keys):
updates[d] = update_tuple[i]
return values.regroup(updates, values.Mirrored)
def read_var(self, var):
assert isinstance(var, values.TPUMirroredVariable)
return var.read_value()
def _unwrap(self, value):
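    """Returns `value` as a list of per-device values, wrapping single values."""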
if isinstance(value, list):
return value
return [value]
@property
def num_towers(self):
return self._num_cores_override or self._tpu_metadata.num_cores
@property
def num_hosts(self):
return self._tpu_metadata.num_hosts
@property
def num_towers_per_host(self):
return self._tpu_metadata.num_of_cores_per_host
@property
def between_graph(self):
return False
@property
def should_init(self):
return True
@property
def should_checkpoint(self):
return True
@property
def should_save_summary(self):
return True
@property
def worker_devices(self):
return self._tpu_devices
@property
def parameter_devices(self):
return self._tpu_devices
def get_host_cpu_device(self, host_id):
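    """Returns the CPU device string for `host_id`, e.g.
    '/job:tpu_worker/task:0/device:CPU:0' on a remote cluster or
    '/replica:0/task:0/device:CPU:0' for a local master."""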
if self._tpu_cluster_resolver.get_master() in ('', 'local'):
return '/replica:0/task:0/device:CPU:0'
job_name = self._tpu_cluster_resolver.get_job_name() or 'tpu_worker'
return '/job:%s/task:%d/device:CPU:0' % (job_name, host_id)
def configure(self,
session_config=None,
cluster_spec=None,
task_type=None,
task_id=None):
del cluster_spec, task_type, task_id
if session_config:
session_config.isolate_session_state = True
cluster_spec = self._tpu_cluster_resolver.cluster_spec()
if cluster_spec:
session_config.cluster_def.CopyFrom(cluster_spec.as_cluster_def())
| apache-2.0 |
grlee77/nipype | nipype/interfaces/ants/utils.py | 9 | 7912 | """ANTS utility interfaces
Change directory to provide relative paths for doctests
>>> import os
>>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
>>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data'))
>>> os.chdir(datadir)
"""
import os
from .base import ANTSCommand, ANTSCommandInputSpec
from ..base import (TraitedSpec, File, traits,
isdefined)
from ...utils.filemanip import split_filename
from nipype.interfaces.base import InputMultiPath
class AverageAffineTransformInputSpec(ANTSCommandInputSpec):
dimension = traits.Enum(3, 2, argstr='%d', usedefault=False, mandatory=True, position=0, desc='image dimension (2 or 3)')
output_affine_transform = File(argstr='%s', mandatory=True, position=1, desc='Outputfname.txt: the name of the resulting transform.')
transforms = InputMultiPath(File(exists=True), argstr='%s', mandatory=True,
position=3, desc=('transforms to average'))
class AverageAffineTransformOutputSpec(TraitedSpec):
affine_transform = File(exists=True, desc='average transform file')
class AverageAffineTransform(ANTSCommand):
"""
Examples
--------
>>> from nipype.interfaces.ants import AverageAffineTransform
>>> avg = AverageAffineTransform()
>>> avg.inputs.dimension = 3
>>> avg.inputs.transforms = ['trans.mat', 'func_to_struct.mat']
>>> avg.inputs.output_affine_transform = 'MYtemplatewarp.mat'
>>> avg.cmdline
'AverageAffineTransform 3 MYtemplatewarp.mat trans.mat func_to_struct.mat'
"""
_cmd = 'AverageAffineTransform'
input_spec = AverageAffineTransformInputSpec
output_spec = AverageAffineTransformOutputSpec
def _format_arg(self, opt, spec, val):
return super(AverageAffineTransform, self)._format_arg(opt, spec, val)
def _list_outputs(self):
outputs = self._outputs().get()
outputs['affine_transform'] = os.path.abspath(
self.inputs.output_affine_transform)
return outputs
class AverageImagesInputSpec(ANTSCommandInputSpec):
dimension = traits.Enum(3, 2, argstr='%d', mandatory=True,
position=0, desc='image dimension (2 or 3)')
output_average_image = File("average.nii", argstr='%s', position=1, desc='the name of the resulting image.', usedefault=True, hash_files=False)
    normalize = traits.Bool(argstr="%d", mandatory=True, position=2, desc='Normalize: if true, the 2nd image ' +
'is divided by its mean. This will select the largest image to average into.')
images = InputMultiPath(File(exists=True), argstr='%s', mandatory=True, position=3, desc=('image to apply transformation to (generally a coregistered functional)'))
class AverageImagesOutputSpec(TraitedSpec):
output_average_image = File(exists=True, desc='average image file')
class AverageImages(ANTSCommand):
"""
Examples
--------
>>> from nipype.interfaces.ants import AverageImages
>>> avg = AverageImages()
>>> avg.inputs.dimension = 3
>>> avg.inputs.output_average_image = "average.nii.gz"
>>> avg.inputs.normalize = True
>>> avg.inputs.images = ['rc1s1.nii', 'rc1s1.nii']
>>> avg.cmdline
'AverageImages 3 average.nii.gz 1 rc1s1.nii rc1s1.nii'
"""
_cmd = 'AverageImages'
input_spec = AverageImagesInputSpec
output_spec = AverageImagesOutputSpec
def _format_arg(self, opt, spec, val):
return super(AverageImages, self)._format_arg(opt, spec, val)
def _list_outputs(self):
outputs = self._outputs().get()
outputs['output_average_image'] = os.path.realpath(
self.inputs.output_average_image)
return outputs
class MultiplyImagesInputSpec(ANTSCommandInputSpec):
dimension = traits.Enum(3, 2, argstr='%d', usedefault=False, mandatory=True, position=0, desc='image dimension (2 or 3)')
first_input = File(
argstr='%s', exists=True, mandatory=True, position=1, desc='image 1')
second_input = traits.Either(File(exists=True), traits.Float, argstr='%s', mandatory=True, position=2, desc='image 2 or multiplication weight')
output_product_image = File(argstr='%s', mandatory=True, position=3, desc='Outputfname.nii.gz: the name of the resulting image.')
class MultiplyImagesOutputSpec(TraitedSpec):
output_product_image = File(exists=True, desc='average image file')
class MultiplyImages(ANTSCommand):
"""
Examples
--------
>>> from nipype.interfaces.ants import MultiplyImages
>>> test = MultiplyImages()
>>> test.inputs.dimension = 3
>>> test.inputs.first_input = 'moving2.nii'
>>> test.inputs.second_input = 0.25
>>> test.inputs.output_product_image = "out.nii"
>>> test.cmdline
'MultiplyImages 3 moving2.nii 0.25 out.nii'
"""
_cmd = 'MultiplyImages'
input_spec = MultiplyImagesInputSpec
output_spec = MultiplyImagesOutputSpec
def _format_arg(self, opt, spec, val):
return super(MultiplyImages, self)._format_arg(opt, spec, val)
def _list_outputs(self):
outputs = self._outputs().get()
outputs['output_product_image'] = os.path.abspath(
self.inputs.output_product_image)
return outputs
class JacobianDeterminantInputSpec(ANTSCommandInputSpec):
dimension = traits.Enum(3, 2, argstr='%d', usedefault=False, mandatory=True,
position=0, desc='image dimension (2 or 3)')
warp_file = File(argstr='%s', exists=True, mandatory=True,
position=1, desc='input warp file')
output_prefix = File(argstr='%s', genfile=True, hash_files=False,
position=2,
desc=('prefix of the output image filename: '
'PREFIX(log)jacobian.nii.gz'))
use_log = traits.Enum(0, 1, argstr='%d', position=3,
desc='log transform the jacobian determinant')
template_mask = File(argstr='%s', exists=True, position=4,
desc='template mask to adjust for head size')
norm_by_total = traits.Enum(0, 1, argstr='%d', position=5,
desc=('normalize jacobian by total in mask to '
'adjust for head size'))
projection_vector = traits.List(traits.Float(), argstr='%s', sep='x',
position=6,
desc='vector to project warp against')
class JacobianDeterminantOutputSpec(TraitedSpec):
jacobian_image = File(exists=True, desc='(log transformed) jacobian image')
class JacobianDeterminant(ANTSCommand):
"""
Examples
--------
>>> from nipype.interfaces.ants import JacobianDeterminant
>>> jacobian = JacobianDeterminant()
>>> jacobian.inputs.dimension = 3
>>> jacobian.inputs.warp_file = 'ants_Warp.nii.gz'
>>> jacobian.inputs.output_prefix = 'Sub001_'
>>> jacobian.inputs.use_log = 1
>>> jacobian.cmdline
'ANTSJacobian 3 ants_Warp.nii.gz Sub001_ 1'
"""
_cmd = 'ANTSJacobian'
input_spec = JacobianDeterminantInputSpec
output_spec = JacobianDeterminantOutputSpec
def _gen_filename(self, name):
if name == 'output_prefix':
output = self.inputs.output_prefix
if not isdefined(output):
_, name, ext = split_filename(self.inputs.warp_file)
output = name + '_'
return output
return None
def _list_outputs(self):
outputs = self._outputs().get()
if self.inputs.use_log == 1:
outputs['jacobian_image'] = os.path.abspath(
self._gen_filename('output_prefix') + 'logjacobian.nii.gz')
else:
outputs['jacobian_image'] = os.path.abspath(
self._gen_filename('output_prefix') + 'jacobian.nii.gz')
return outputs
| bsd-3-clause |
martinbede/second-sight | tensorflow/python/kernel_tests/trace_op_test.py | 13 | 2192 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy
import tensorflow as tf
class TraceTest(tf.test.TestCase):
def setUp(self):
    numpy.random.seed(0)
def traceOp(self, x, dtype, expected_ans, use_gpu=False):
with self.test_session(use_gpu=use_gpu):
tf_ans = tf.trace(x.astype(dtype))
out = tf_ans.eval()
self.assertAllClose(out, expected_ans)
def testEmptyTensor(self):
x = numpy.array([])
self.assertRaises(ValueError, self.traceOp, x, numpy.float32, 0)
def testRankOneTensor(self):
x = numpy.array([1,2,3])
self.assertRaises(ValueError, self.traceOp, x, numpy.float32, 0)
def testRankTwoIntTensor(self):
x = numpy.array(
[[1, 0, 0],
[0, 2, 0],
[0, 0, 3]])
expected_ans = 6
self.traceOp(x, numpy.int32, expected_ans)
self.traceOp(x, numpy.int64, expected_ans)
def testRankTwoFloatTensor(self):
x = numpy.array(
[[1.1, 0, 0],
[0, 2.2, 0],
[0, 0, 3.3]])
expected_ans = 6.6
self.traceOp(x, numpy.float32, expected_ans)
self.traceOp(x, numpy.float64, expected_ans)
def testRankThreeFloatTensor(self):
x = numpy.random.rand(2, 2, 2)
self.assertRaises(ValueError, self.traceOp, x, numpy.float32, 0)
def testRankFourFloatTensor(self):
x = numpy.random.rand(2, 2, 2, 2)
self.assertRaises(ValueError, self.traceOp, x, numpy.float32, 0)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 |
ChristophKuhfuss/supertuxNEAT | tools/font-add-border.py | 7 | 2242 | #!/usr/bin/env python3
# SuperTux
# Copyright (C) 2014 Ingo Ruhnke <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from PIL import Image
import sys
import argparse
# Add a 1 pixel border around every glyph in a font
def fix_font_file(filename, glyph_width, glyph_height):
print("Processing %s %dx%d" % (filename, glyph_width, glyph_height))
img = Image.open(filename)
w, h = img.size
assert w % glyph_width == 0, "image not multiple of glyph width"
assert h % glyph_height == 0, "image not multiple of glyph height"
w_g = w // glyph_width
h_g = h // glyph_height
print("Glyphs: %ax%a" % (w_g, h_g))
out = Image.new("RGBA", (w_g * (glyph_width + 2), h_g * (glyph_height + 2)), color=5)
for y in range(0, h_g):
for x in range(0, w_g):
ix = x * glyph_width
iy = y * glyph_height
ox = x * (glyph_width + 2) + 1
oy = y * (glyph_height + 2) + 1
glyph = img.crop((ix, iy, ix + glyph_width, iy + glyph_height))
out.paste(glyph, (ox, oy))
out.save("/tmp/out.png")
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Add a 1 pixel border around every glyph in a font')
parser.add_argument('FILE', action='store', type=str,
help='font image to change')
parser.add_argument('GLYPH_WIDTH', action='store', type=int,
help='glyph width')
parser.add_argument('GLYPH_HEIGHT', action='store', type=int,
help='glyph height')
args = parser.parse_args()
fix_font_file(args.FILE, args.GLYPH_WIDTH, args.GLYPH_HEIGHT)
# EOF #
| gpl-3.0 |
isyippee/nova | nova/api/openstack/compute/suspend_server.py | 6 | 2923 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from webob import exc
from nova.api.openstack import common
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute
from nova import exception
ALIAS = "os-suspend-server"
authorize = extensions.os_compute_authorizer(ALIAS)
class SuspendServerController(wsgi.Controller):
def __init__(self, *args, **kwargs):
super(SuspendServerController, self).__init__(*args, **kwargs)
self.compute_api = compute.API(skip_policy_check=True)
@wsgi.response(202)
@extensions.expected_errors((404, 409))
@wsgi.action('suspend')
def _suspend(self, req, id, body):
"""Permit admins to suspend the server."""
context = req.environ['nova.context']
authorize(context, action='suspend')
try:
server = common.get_instance(self.compute_api, context, id)
self.compute_api.suspend(context, server)
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'suspend', id)
@wsgi.response(202)
@extensions.expected_errors((404, 409))
@wsgi.action('resume')
def _resume(self, req, id, body):
"""Permit admins to resume the server from suspend."""
context = req.environ['nova.context']
authorize(context, action='resume')
try:
server = common.get_instance(self.compute_api, context, id)
self.compute_api.resume(context, server)
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'resume', id)
class SuspendServer(extensions.V21APIExtensionBase):
"""Enable suspend/resume server actions."""
name = "SuspendServer"
alias = ALIAS
version = 1
def get_controller_extensions(self):
controller = SuspendServerController()
extension = extensions.ControllerExtension(self, 'servers', controller)
return [extension]
def get_resources(self):
return []
| apache-2.0 |
ladybug-tools/honeybee | honeybee_plus/utilcol.py | 1 | 1078 | """A collection of useful utilities for Honeybee"""
import uuid
import re
def random_name(shorten=True):
"""Generate a random name as a string using uuid.
Args:
        shorten: If True the name will be the first two segments of the uuid.
"""
if shorten:
return '-'.join(str(uuid.uuid4()).split('-')[:2])
else:
return str(uuid.uuid4())
def check_name(name):
"""Check if a name is a valid honeybee name.
A valid name can only have alphabet, digits, - and _.
"""
name = name.encode('utf-8')
try:
match = re.match(b"^[.A-Za-z0-9_-]*$", name)
except TypeError:
match = re.match(r"^[.A-Za-z0-9_-]*$", name)
if match:
return True
else:
raise ValueError(
'Invalid input name: ({}).'
' Name can only contain letters, numbers,'
' dots, underscores and dashes.'.format(name)
)
if __name__ == '__main__':
check_name('should_be_fine')
# check_name('also-fine')
check_name('this.is.also.fine.1234')
# check_name('not good')
| gpl-3.0 |
zjj/trac_hack | sample-plugins/HelloWorld.py | 1 | 2140 | """Example macro."""
revision = "$Rev: 6326 $"
url = "$URL: https://svn.edgewall.org/repos/trac/tags/trac-0.12.2/sample-plugins/HelloWorld.py $"
#
# The following shows the code for macro, old-style.
#
# The `execute` function serves no purpose other than to illustrate
# the example; it will not be used anymore.
#
# ---- (ignore in your own macro) ----
# --
from trac.util import escape
def execute(hdf, txt, env):
# Currently hdf is set only when the macro is called
    # from a wiki page
if hdf:
hdf['wiki.macro.greeting'] = 'Hello World'
# args will be `None` if the macro is called without parenthesis.
args = txt or 'No arguments'
# then, as `txt` comes from the user, it's important to guard against
# the possibility to inject malicious HTML/Javascript, by using `escape()`:
return 'Hello World, args = ' + escape(args)
# --
# ---- (ignore in your own macro) ----
#
# The following is the converted new-style macro
#
# ---- (reuse for your own macro) ----
# --
from trac.wiki.macros import WikiMacroBase
class HelloWorldMacro(WikiMacroBase):
"""Simple HelloWorld macro.
Note that the name of the class is meaningful:
- it must end with "Macro"
- what comes before "Macro" ends up being the macro name
The documentation of the class (i.e. what you're reading)
will become the documentation of the macro, as shown by
the !MacroList macro (usually used in the TracWikiMacros page).
"""
def expand_macro(self, formatter, name, args):
"""Return some output that will be displayed in the Wiki content.
`name` is the actual name of the macro (no surprise, here it'll be
`'HelloWorld'`),
`args` is the text enclosed in parenthesis at the call of the macro.
Note that if there are ''no'' parenthesis (like in, e.g.
[[HelloWorld]]), then `args` is `None`.
"""
return 'Hello World, args = ' + unicode(args)
# Note that there's no need to HTML escape the returned data,
# as the template engine (Genshi) will do it for us.
# --
# ---- (reuse for your own macro) ----
| bsd-3-clause |